From f84805494cbf9cb6fb3da4351165a74f02497ab9 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 00:40:02 +0700 Subject: [PATCH 001/428] Fix date. --- Changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Changelog.rst b/Changelog.rst index 871b761f..bf3e62ea 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-?? 0.7.0b +## 2013-12-01 0.7.0b -------------------- * Update indentation support; * Python3 support; From 8a0bcc85c97b91ae93461193cf92357fd297fbd1 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 02:21:34 +0700 Subject: [PATCH 002/428] Update pymode --- Makefile | 16 ++++++-- pymode/{ => libs}/pylama/__init__.py | 2 +- pymode/{ => libs}/pylama/config.py | 6 ++- pymode/{ => libs}/pylama/core.py | 34 +++++++++++++---- pymode/{ => libs}/pylama/hook.py | 9 ++--- pymode/libs/pylama/libs/__init__.py | 1 + pymode/libs/pylama/libs/importlib.py | 38 +++++++++++++++++++ .../{pylama => libs/pylama/libs}/inirama.py | 0 pymode/libs/pylama/lint/__init__.py | 22 +++++++++++ pymode/libs/pylama/lint/extensions.py | 27 +++++++++++++ .../pylama/lint/pylama_mccabe/__init__.py | 0 .../pylama/lint/pylama_mccabe/mccabe.py | 0 .../pylama/lint/pylama_pep257/__init__.py | 0 .../pylama/lint/pylama_pep257/pep257.py | 0 .../pylama/lint/pylama_pep8/__init__.py | 0 .../pylama/lint/pylama_pep8/pep8.py | 0 .../pylama/lint/pylama_pyflakes/__init__.py | 0 .../lint/pylama_pyflakes/pyflakes/__init__.py | 0 .../lint/pylama_pyflakes/pyflakes/checker.py | 0 .../lint/pylama_pyflakes/pyflakes/messages.py | 0 .../pylama/lint/pylama_pylint/__init__.py | 2 +- .../pylama/lint/pylama_pylint/main.py | 2 +- .../pylama/lint/pylama_pylint/pylint.rc | 4 +- .../lint/pylama_pylint/pylint/__init__.py | 0 .../lint/pylama_pylint/pylint/__pkginfo__.py | 0 .../pylama_pylint/pylint/astroid/__init__.py | 0 .../pylint/astroid/__pkginfo__.py | 0 .../pylama_pylint/pylint/astroid/as_string.py | 0 
.../pylama_pylint/pylint/astroid/bases.py | 0 .../pylama_pylint/pylint/astroid/builder.py | 0 .../pylint/astroid/exceptions.py | 0 .../pylama_pylint/pylint/astroid/inference.py | 0 .../pylama_pylint/pylint/astroid/manager.py | 0 .../pylama_pylint/pylint/astroid/mixins.py | 0 .../pylint/astroid/node_classes.py | 0 .../pylama_pylint/pylint/astroid/nodes.py | 0 .../pylama_pylint/pylint/astroid/protocols.py | 0 .../pylint/astroid/raw_building.py | 0 .../pylama_pylint/pylint/astroid/rebuilder.py | 0 .../pylint/astroid/scoped_nodes.py | 0 .../pylama_pylint/pylint/astroid/utils.py | 0 .../pylama_pylint/pylint/checkers/__init__.py | 0 .../pylama_pylint/pylint/checkers/base.py | 0 .../pylama_pylint/pylint/checkers/classes.py | 0 .../pylint/checkers/design_analysis.py | 0 .../pylint/checkers/exceptions.py | 0 .../pylama_pylint/pylint/checkers/format.py | 0 .../pylama_pylint/pylint/checkers/imports.py | 0 .../pylama_pylint/pylint/checkers/logging.py | 0 .../pylama_pylint/pylint/checkers/misc.py | 0 .../pylama_pylint/pylint/checkers/newstyle.py | 0 .../pylint/checkers/raw_metrics.py | 0 .../pylama_pylint/pylint/checkers/similar.py | 0 .../pylama_pylint/pylint/checkers/stdlib.py | 0 .../pylama_pylint/pylint/checkers/strings.py | 0 .../pylint/checkers/typecheck.py | 0 .../pylama_pylint/pylint/checkers/utils.py | 0 .../pylint/checkers/variables.py | 0 .../lint/pylama_pylint/pylint/config.py | 0 .../lint/pylama_pylint/pylint/interfaces.py | 0 .../pylama/lint/pylama_pylint/pylint/lint.py | 0 .../pylama_pylint/pylint/logilab/__init__.py | 0 .../pylint/logilab/common/__init__.py | 0 .../pylint/logilab/common/__pkginfo__.py | 0 .../pylint/logilab/common/changelog.py | 0 .../pylint/logilab/common/compat.py | 0 .../pylint/logilab/common/configuration.py | 0 .../pylint/logilab/common/decorators.py | 0 .../pylint/logilab/common/deprecation.py | 0 .../pylint/logilab/common/graph.py | 0 .../pylint/logilab/common/interface.py | 0 .../pylint/logilab/common/modutils.py | 0 
.../pylint/logilab/common/optik_ext.py | 0 .../pylint/logilab/common/textutils.py | 0 .../pylint/logilab/common/tree.py | 0 .../logilab/common/ureports/__init__.py | 0 .../logilab/common/ureports/docbook_writer.py | 0 .../logilab/common/ureports/html_writer.py | 0 .../pylint/logilab/common/ureports/nodes.py | 0 .../logilab/common/ureports/text_writer.py | 0 .../pylint/logilab/common/visitor.py | 0 .../pylint/reporters/__init__.py | 0 .../pylint/reporters/guireporter.py | 0 .../pylama_pylint/pylint/reporters/html.py | 0 .../pylama_pylint/pylint/reporters/text.py | 0 .../pylama/lint/pylama_pylint/pylint/utils.py | 0 pymode/{ => libs}/pylama/main.py | 12 ++++-- pymode/{ => libs}/pylama/tasks.py | 4 +- pymode/{ => libs}/pylama/utils.py | 0 pymode/{libs => libs2}/rope/__init__.py | 0 pymode/{libs => libs2}/rope/base/__init__.py | 0 pymode/{libs => libs2}/rope/base/arguments.py | 0 pymode/{libs => libs2}/rope/base/ast.py | 0 pymode/{libs => libs2}/rope/base/astutils.py | 0 pymode/{libs => libs2}/rope/base/builtins.py | 0 pymode/{libs => libs2}/rope/base/change.py | 0 .../{libs => libs2}/rope/base/codeanalyze.py | 0 .../rope/base/default_config.py | 0 pymode/{libs => libs2}/rope/base/evaluate.py | 0 .../{libs => libs2}/rope/base/exceptions.py | 0 .../{libs => libs2}/rope/base/fscommands.py | 0 pymode/{libs => libs2}/rope/base/history.py | 0 pymode/{libs => libs2}/rope/base/libutils.py | 0 .../{libs => libs2}/rope/base/oi/__init__.py | 0 pymode/{libs => libs2}/rope/base/oi/doa.py | 0 .../{libs => libs2}/rope/base/oi/memorydb.py | 0 .../{libs => libs2}/rope/base/oi/objectdb.py | 0 .../rope/base/oi/objectinfo.py | 0 pymode/{libs => libs2}/rope/base/oi/runmod.py | 0 pymode/{libs => libs2}/rope/base/oi/soa.py | 0 pymode/{libs => libs2}/rope/base/oi/soi.py | 0 .../{libs => libs2}/rope/base/oi/transform.py | 0 pymode/{libs => libs2}/rope/base/prefs.py | 0 pymode/{libs => libs2}/rope/base/project.py | 0 pymode/{libs => libs2}/rope/base/pycore.py | 0 pymode/{libs => 
libs2}/rope/base/pynames.py | 0 .../{libs => libs2}/rope/base/pynamesdef.py | 0 pymode/{libs => libs2}/rope/base/pyobjects.py | 0 .../{libs => libs2}/rope/base/pyobjectsdef.py | 0 pymode/{libs => libs2}/rope/base/pyscopes.py | 0 .../rope/base/resourceobserver.py | 0 pymode/{libs => libs2}/rope/base/resources.py | 0 pymode/{libs => libs2}/rope/base/simplify.py | 0 pymode/{libs => libs2}/rope/base/stdmods.py | 0 .../{libs => libs2}/rope/base/taskhandle.py | 0 pymode/{libs => libs2}/rope/base/utils.py | 0 pymode/{libs => libs2}/rope/base/worder.py | 0 .../{libs => libs2}/rope/contrib/__init__.py | 0 .../rope/contrib/autoimport.py | 0 .../rope/contrib/changestack.py | 0 .../rope/contrib/codeassist.py | 0 .../rope/contrib/finderrors.py | 0 pymode/{libs => libs2}/rope/contrib/findit.py | 0 .../rope/contrib/fixmodnames.py | 0 .../{libs => libs2}/rope/contrib/fixsyntax.py | 0 .../{libs => libs2}/rope/contrib/generate.py | 0 .../{libs => libs2}/rope/refactor/__init__.py | 0 .../rope/refactor/change_signature.py | 0 .../rope/refactor/encapsulate_field.py | 0 .../{libs => libs2}/rope/refactor/extract.py | 0 .../rope/refactor/functionutils.py | 0 .../rope/refactor/importutils/__init__.py | 0 .../rope/refactor/importutils/actions.py | 0 .../rope/refactor/importutils/importinfo.py | 0 .../refactor/importutils/module_imports.py | 0 .../{libs => libs2}/rope/refactor/inline.py | 0 .../rope/refactor/introduce_factory.py | 0 .../rope/refactor/introduce_parameter.py | 0 .../rope/refactor/localtofield.py | 0 .../rope/refactor/method_object.py | 0 pymode/{libs => libs2}/rope/refactor/move.py | 0 .../rope/refactor/multiproject.py | 0 .../rope/refactor/occurrences.py | 0 .../rope/refactor/patchedast.py | 0 .../{libs => libs2}/rope/refactor/rename.py | 0 .../rope/refactor/restructure.py | 0 .../rope/refactor/similarfinder.py | 0 .../rope/refactor/sourceutils.py | 0 .../{libs => libs2}/rope/refactor/suites.py | 0 .../rope/refactor/topackage.py | 0 .../rope/refactor/usefunction.py | 0 
.../rope/refactor/wildcards.py | 0 pymode/lint.py | 8 ++-- pymode/pylama/lint/__init__.py | 37 ------------------ pymode/rope.py | 25 +++++------- pymode/utils.py | 20 ++++++++-- 166 files changed, 180 insertions(+), 89 deletions(-) rename pymode/{ => libs}/pylama/__init__.py (92%) rename pymode/{ => libs}/pylama/config.py (97%) rename pymode/{ => libs}/pylama/core.py (85%) rename pymode/{ => libs}/pylama/hook.py (93%) create mode 100644 pymode/libs/pylama/libs/__init__.py create mode 100644 pymode/libs/pylama/libs/importlib.py rename pymode/{pylama => libs/pylama/libs}/inirama.py (100%) create mode 100644 pymode/libs/pylama/lint/__init__.py create mode 100644 pymode/libs/pylama/lint/extensions.py rename pymode/{ => libs}/pylama/lint/pylama_mccabe/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_mccabe/mccabe.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pep257/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pep257/pep257.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pep8/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pep8/pep8.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pyflakes/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pyflakes/pyflakes/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pyflakes/pyflakes/checker.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pyflakes/pyflakes/messages.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/__init__.py (91%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/main.py (96%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint.rc (82%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/__pkginfo__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py (100%) rename pymode/{ => 
libs}/pylama/lint/pylama_pylint/pylint/astroid/as_string.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/bases.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/builder.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/exceptions.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/inference.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/manager.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/mixins.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/node_classes.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/nodes.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/protocols.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/raw_building.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/rebuilder.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/scoped_nodes.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/astroid/utils.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/base.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/classes.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/format.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/imports.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/logging.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/misc.py (100%) rename pymode/{ => 
libs}/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/similar.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/strings.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/utils.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/checkers/variables.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/config.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/interfaces.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/lint.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/__pkginfo__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py 
(100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/reporters/__init__.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/reporters/html.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/reporters/text.py (100%) rename pymode/{ => libs}/pylama/lint/pylama_pylint/pylint/utils.py (100%) rename pymode/{ => libs}/pylama/main.py (88%) rename pymode/{ => libs}/pylama/tasks.py (95%) rename pymode/{ => libs}/pylama/utils.py (100%) rename pymode/{libs => libs2}/rope/__init__.py (100%) rename pymode/{libs => libs2}/rope/base/__init__.py (100%) rename pymode/{libs => libs2}/rope/base/arguments.py (100%) rename pymode/{libs => libs2}/rope/base/ast.py (100%) rename pymode/{libs => libs2}/rope/base/astutils.py (100%) rename pymode/{libs => libs2}/rope/base/builtins.py (100%) rename pymode/{libs => libs2}/rope/base/change.py (100%) rename pymode/{libs => libs2}/rope/base/codeanalyze.py (100%) rename pymode/{libs => libs2}/rope/base/default_config.py (100%) rename pymode/{libs => libs2}/rope/base/evaluate.py (100%) rename pymode/{libs => libs2}/rope/base/exceptions.py (100%) 
rename pymode/{libs => libs2}/rope/base/fscommands.py (100%) rename pymode/{libs => libs2}/rope/base/history.py (100%) rename pymode/{libs => libs2}/rope/base/libutils.py (100%) rename pymode/{libs => libs2}/rope/base/oi/__init__.py (100%) rename pymode/{libs => libs2}/rope/base/oi/doa.py (100%) rename pymode/{libs => libs2}/rope/base/oi/memorydb.py (100%) rename pymode/{libs => libs2}/rope/base/oi/objectdb.py (100%) rename pymode/{libs => libs2}/rope/base/oi/objectinfo.py (100%) rename pymode/{libs => libs2}/rope/base/oi/runmod.py (100%) rename pymode/{libs => libs2}/rope/base/oi/soa.py (100%) rename pymode/{libs => libs2}/rope/base/oi/soi.py (100%) rename pymode/{libs => libs2}/rope/base/oi/transform.py (100%) rename pymode/{libs => libs2}/rope/base/prefs.py (100%) rename pymode/{libs => libs2}/rope/base/project.py (100%) rename pymode/{libs => libs2}/rope/base/pycore.py (100%) rename pymode/{libs => libs2}/rope/base/pynames.py (100%) rename pymode/{libs => libs2}/rope/base/pynamesdef.py (100%) rename pymode/{libs => libs2}/rope/base/pyobjects.py (100%) rename pymode/{libs => libs2}/rope/base/pyobjectsdef.py (100%) rename pymode/{libs => libs2}/rope/base/pyscopes.py (100%) rename pymode/{libs => libs2}/rope/base/resourceobserver.py (100%) rename pymode/{libs => libs2}/rope/base/resources.py (100%) rename pymode/{libs => libs2}/rope/base/simplify.py (100%) rename pymode/{libs => libs2}/rope/base/stdmods.py (100%) rename pymode/{libs => libs2}/rope/base/taskhandle.py (100%) rename pymode/{libs => libs2}/rope/base/utils.py (100%) rename pymode/{libs => libs2}/rope/base/worder.py (100%) rename pymode/{libs => libs2}/rope/contrib/__init__.py (100%) rename pymode/{libs => libs2}/rope/contrib/autoimport.py (100%) rename pymode/{libs => libs2}/rope/contrib/changestack.py (100%) rename pymode/{libs => libs2}/rope/contrib/codeassist.py (100%) rename pymode/{libs => libs2}/rope/contrib/finderrors.py (100%) rename pymode/{libs => libs2}/rope/contrib/findit.py (100%) rename 
pymode/{libs => libs2}/rope/contrib/fixmodnames.py (100%) rename pymode/{libs => libs2}/rope/contrib/fixsyntax.py (100%) rename pymode/{libs => libs2}/rope/contrib/generate.py (100%) rename pymode/{libs => libs2}/rope/refactor/__init__.py (100%) rename pymode/{libs => libs2}/rope/refactor/change_signature.py (100%) rename pymode/{libs => libs2}/rope/refactor/encapsulate_field.py (100%) rename pymode/{libs => libs2}/rope/refactor/extract.py (100%) rename pymode/{libs => libs2}/rope/refactor/functionutils.py (100%) rename pymode/{libs => libs2}/rope/refactor/importutils/__init__.py (100%) rename pymode/{libs => libs2}/rope/refactor/importutils/actions.py (100%) rename pymode/{libs => libs2}/rope/refactor/importutils/importinfo.py (100%) rename pymode/{libs => libs2}/rope/refactor/importutils/module_imports.py (100%) rename pymode/{libs => libs2}/rope/refactor/inline.py (100%) rename pymode/{libs => libs2}/rope/refactor/introduce_factory.py (100%) rename pymode/{libs => libs2}/rope/refactor/introduce_parameter.py (100%) rename pymode/{libs => libs2}/rope/refactor/localtofield.py (100%) rename pymode/{libs => libs2}/rope/refactor/method_object.py (100%) rename pymode/{libs => libs2}/rope/refactor/move.py (100%) rename pymode/{libs => libs2}/rope/refactor/multiproject.py (100%) rename pymode/{libs => libs2}/rope/refactor/occurrences.py (100%) rename pymode/{libs => libs2}/rope/refactor/patchedast.py (100%) rename pymode/{libs => libs2}/rope/refactor/rename.py (100%) rename pymode/{libs => libs2}/rope/refactor/restructure.py (100%) rename pymode/{libs => libs2}/rope/refactor/similarfinder.py (100%) rename pymode/{libs => libs2}/rope/refactor/sourceutils.py (100%) rename pymode/{libs => libs2}/rope/refactor/suites.py (100%) rename pymode/{libs => libs2}/rope/refactor/topackage.py (100%) rename pymode/{libs => libs2}/rope/refactor/usefunction.py (100%) rename pymode/{libs => libs2}/rope/refactor/wildcards.py (100%) delete mode 100644 pymode/pylama/lint/__init__.py diff 
--git a/Makefile b/Makefile index 5df8213d..4b6e3924 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,7 @@ +PYMODE = $(CURDIR)/pymode +LIBS = $(PYMODE)/libs +PYLAMA = $(LIBS)/pylama + .PHONY: clean clean: find . -name "*.pyc" -delete @@ -9,6 +13,12 @@ test: .PHONY: pylama pylama: - rm -rf pylibs/pylama - cp -r ~/Dropbox/projects/pylama/pylama pylibs/pylama - cp -r ~/Dropbox/projects/pylama/plugins/pylama_pylint/pylama_pylint/ pylibs/pylama/lint/pylama_pylint + rm -rf $(PYLAMA) + make $(PYLAMA) + make $(PYLAMA)/lint/pylama_pylint + +$(PYLAMA): + cp -r ~/Dropbox/projects/pylama/pylama $(PYLAMA) + +$(PYLAMA)/lint/pylama_pylint: + cp -r ~/Dropbox/projects/pylama/plugins/pylama_pylint/pylama_pylint/ $(PYLAMA)/lint/pylama_pylint diff --git a/pymode/pylama/__init__.py b/pymode/libs/pylama/__init__.py similarity index 92% rename from pymode/pylama/__init__.py rename to pymode/libs/pylama/__init__.py index f73e6931..c5a5acfb 100644 --- a/pymode/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -version_info = 2, 0, 1 +version_info = 2, 0, 3 __version__ = version = '.'.join(map(str, version_info)) __project__ = __name__ diff --git a/pymode/pylama/config.py b/pymode/libs/pylama/config.py similarity index 97% rename from pymode/pylama/config.py rename to pymode/libs/pylama/config.py index 5db73ee0..d706b68c 100644 --- a/pymode/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -8,8 +8,8 @@ from . import version from .core import LOGGER, STREAM -from .inirama import Namespace -from .lint import LINTERS +from .libs.inirama import Namespace +from .lint.extensions import LINTERS #: A default checkers @@ -127,6 +127,8 @@ def parse_linters(csp_str): linter = LINTERS.get(name) if linter: result.append((name, linter)) + else: + logging.warn("Linter `%s` not found." 
% name) return result parser.add_argument( diff --git a/pymode/pylama/core.py b/pymode/libs/pylama/core.py similarity index 85% rename from pymode/pylama/core.py rename to pymode/libs/pylama/core.py index 5655557a..fa65e4ba 100644 --- a/pymode/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -5,7 +5,7 @@ """ import logging import re -from .lint import LINTERS +from .lint.extensions import LINTERS #: The skip pattern SKIP_PATTERN = re.compile(r'# *noqa\b', re.I).search @@ -20,7 +20,9 @@ LOGGER.addHandler(STREAM) -def run(path, ignore=None, select=None, linters=None, config=None, **meta): +def run( + path, ignore=None, select=None, linters=None, config=None, code=None, + **meta): """ Run a code checkers with given params. :return errors: list of dictionaries with error's information @@ -29,11 +31,9 @@ def run(path, ignore=None, select=None, linters=None, config=None, **meta): errors = [] linters = linters or LINTERS.items() params = dict(ignore=ignore, select=select) - code = None try: - with open(path, 'rU') as f: - code = f.read() + '\n\n' - + with CodeContext(code, path) as ctx: + code = ctx.code params = prepare_params( parse_modeline(code), config, ignore=ignore, select=select ) @@ -78,7 +78,7 @@ def run(path, ignore=None, select=None, linters=None, config=None, **meta): errors = [er for er in errors if filter_errors(er, **params)] - if code: + if code and errors: errors = filter_skiplines(code, errors) return sorted(errors, key=lambda x: x['lnum']) @@ -158,3 +158,23 @@ def filter_skiplines(code, errors): errors = [er for er in errors if not er['lnum'] in removed] return errors + + +class CodeContext(object): + + """ Read file if code is None. 
""" + + def __init__(self, code, path): + self.code = code + self.path = path + self._file = None + + def __enter__(self): + if self.code is None: + self._file = open(self.path, 'rU') + self.code = self._file.read() + '\n\n' + return self + + def __exit__(self): + if not self._file is None: + self._file.close() diff --git a/pymode/pylama/hook.py b/pymode/libs/pylama/hook.py similarity index 93% rename from pymode/pylama/hook.py rename to pymode/libs/pylama/hook.py index e35d26fe..a3cac2ec 100644 --- a/pymode/pylama/hook.py +++ b/pymode/libs/pylama/hook.py @@ -29,15 +29,13 @@ def run(command): def git_hook(): """ Run pylama after git commit. """ - from .main import check_files + _, files_modified, _ = run("git diff-index --cached --name-only HEAD") options = parse_options() setup_logger(options) - check_files( - [f for f in map(str, files_modified) if f.endswith('.py')], options - ) + check_files([f for f in map(str, files_modified)], options) def hg_hook(ui, repo, node=None, **kwargs): @@ -53,8 +51,7 @@ def hg_hook(ui, repo, node=None, **kwargs): if file_ in seen or not op.exists(file_): continue seen.add(file_) - if file_.endswith('.py'): - paths.append(file_) + paths.append(file_) options = parse_options() setup_logger(options) diff --git a/pymode/libs/pylama/libs/__init__.py b/pymode/libs/pylama/libs/__init__.py new file mode 100644 index 00000000..95fec137 --- /dev/null +++ b/pymode/libs/pylama/libs/__init__.py @@ -0,0 +1 @@ +""" Support libs. """ diff --git a/pymode/libs/pylama/libs/importlib.py b/pymode/libs/pylama/libs/importlib.py new file mode 100644 index 00000000..ad31a1ac --- /dev/null +++ b/pymode/libs/pylama/libs/importlib.py @@ -0,0 +1,38 @@ +"""Backport of importlib.import_module from 3.x.""" +# While not critical (and in no way guaranteed!), it would be nice to keep this +# code compatible with Python 2.3. 
+import sys + +def _resolve_name(name, package, level): + """Return the absolute name of the module to be imported.""" + if not hasattr(package, 'rindex'): + raise ValueError("'package' not set to a string") + dot = len(package) + for x in xrange(level, 1, -1): + try: + dot = package.rindex('.', 0, dot) + except ValueError: + raise ValueError("attempted relative import beyond top-level " + "package") + return "%s.%s" % (package[:dot], name) + + +def import_module(name, package=None): + """Import a module. + + The 'package' argument is required when performing a relative import. It + specifies the package to use as the anchor point from which to resolve the + relative import to an absolute import. + + """ + if name.startswith('.'): + if not package: + raise TypeError("relative imports require the 'package' argument") + level = 0 + for character in name: + if character != '.': + break + level += 1 + name = _resolve_name(name[level:], package, level) + __import__(name) + return sys.modules[name] diff --git a/pymode/pylama/inirama.py b/pymode/libs/pylama/libs/inirama.py similarity index 100% rename from pymode/pylama/inirama.py rename to pymode/libs/pylama/libs/inirama.py diff --git a/pymode/libs/pylama/lint/__init__.py b/pymode/libs/pylama/lint/__init__.py new file mode 100644 index 00000000..d5d75901 --- /dev/null +++ b/pymode/libs/pylama/lint/__init__.py @@ -0,0 +1,22 @@ +""" Custom module loader. """ + + +class Linter(object): # noqa + + """ Abstract class for linter plugin. """ + + @staticmethod + def allow(path): + """ Check path is relevant for linter. + + :return bool: + + """ + + return path.endswith('.py') + + @staticmethod + def run(path, **meta): + """ Method 'run' should be defined. """ + + raise NotImplementedError(__doc__) diff --git a/pymode/libs/pylama/lint/extensions.py b/pymode/libs/pylama/lint/extensions.py new file mode 100644 index 00000000..d5e92055 --- /dev/null +++ b/pymode/libs/pylama/lint/extensions.py @@ -0,0 +1,27 @@ +""" Load extensions. 
""" + +from os import listdir, path as op + + +CURDIR = op.dirname(__file__) +LINTERS = dict() +PREFIX = 'pylama_' + +try: + from importlib import import_module +except ImportError: + from ..libs.importlib import import_module + +for p in listdir(CURDIR): + if p.startswith(PREFIX) and op.isdir(op.join(CURDIR, p)): + name = p[len(PREFIX):] + module = import_module('.lint.%s%s' % (PREFIX, name), 'pylama') + LINTERS[name] = getattr(module, 'Linter')() + +try: + from pkg_resources import iter_entry_points + + for entry in iter_entry_points('pylama.linter'): + LINTERS[entry.name] = entry.load()() +except ImportError: + pass diff --git a/pymode/pylama/lint/pylama_mccabe/__init__.py b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_mccabe/__init__.py rename to pymode/libs/pylama/lint/pylama_mccabe/__init__.py diff --git a/pymode/pylama/lint/pylama_mccabe/mccabe.py b/pymode/libs/pylama/lint/pylama_mccabe/mccabe.py similarity index 100% rename from pymode/pylama/lint/pylama_mccabe/mccabe.py rename to pymode/libs/pylama/lint/pylama_mccabe/mccabe.py diff --git a/pymode/pylama/lint/pylama_pep257/__init__.py b/pymode/libs/pylama/lint/pylama_pep257/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pep257/__init__.py rename to pymode/libs/pylama/lint/pylama_pep257/__init__.py diff --git a/pymode/pylama/lint/pylama_pep257/pep257.py b/pymode/libs/pylama/lint/pylama_pep257/pep257.py similarity index 100% rename from pymode/pylama/lint/pylama_pep257/pep257.py rename to pymode/libs/pylama/lint/pylama_pep257/pep257.py diff --git a/pymode/pylama/lint/pylama_pep8/__init__.py b/pymode/libs/pylama/lint/pylama_pep8/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pep8/__init__.py rename to pymode/libs/pylama/lint/pylama_pep8/__init__.py diff --git a/pymode/pylama/lint/pylama_pep8/pep8.py b/pymode/libs/pylama/lint/pylama_pep8/pep8.py similarity index 100% rename from 
pymode/pylama/lint/pylama_pep8/pep8.py rename to pymode/libs/pylama/lint/pylama_pep8/pep8.py diff --git a/pymode/pylama/lint/pylama_pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pyflakes/__init__.py rename to pymode/libs/pylama/lint/pylama_pyflakes/__init__.py diff --git a/pymode/pylama/lint/pylama_pyflakes/pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pyflakes/pyflakes/__init__.py rename to pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py diff --git a/pymode/pylama/lint/pylama_pyflakes/pyflakes/checker.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py similarity index 100% rename from pymode/pylama/lint/pylama_pyflakes/pyflakes/checker.py rename to pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py diff --git a/pymode/pylama/lint/pylama_pyflakes/pyflakes/messages.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py similarity index 100% rename from pymode/pylama/lint/pylama_pyflakes/pyflakes/messages.py rename to pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py diff --git a/pymode/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py similarity index 91% rename from pymode/pylama/lint/pylama_pylint/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/__init__.py index 49822d2c..ce270f54 100644 --- a/pymode/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -4,7 +4,7 @@ # ================== -__version__ = '0.1.0' +__version__ = '0.1.3' __project__ = 'pylama_pylint' __author__ = "horneds " __license__ = "BSD" diff --git a/pymode/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py similarity index 96% rename from pymode/pylama/lint/pylama_pylint/main.py rename to 
pymode/libs/pylama/lint/pylama_pylint/main.py index b38f7f2c..6132c2e8 100644 --- a/pymode/pylama/lint/pylama_pylint/main.py +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py @@ -2,7 +2,7 @@ from os import path as op, environ -from .. import Linter as BaseLinter # noqa +from pylama.lint import Linter as BaseLinter # noqa PYLINT_RC = op.abspath(op.join(op.dirname(__file__), 'pylint.rc')) diff --git a/pymode/pylama/lint/pylama_pylint/pylint.rc b/pymode/libs/pylama/lint/pylama_pylint/pylint.rc similarity index 82% rename from pymode/pylama/lint/pylama_pylint/pylint.rc rename to pymode/libs/pylama/lint/pylama_pylint/pylint.rc index c58c4d0e..799c62f6 100644 --- a/pymode/pylama/lint/pylama_pylint/pylint.rc +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint.rc @@ -3,7 +3,6 @@ # http://pylint-messages.wikidot.com/all-codes # # C0103: Invalid name "%s" (should match %s) -# C0111: Missing docstring # E1101: %s %r has no %r member # R0901: Too many ancestors (%s/%s) # R0902: Too many instance attributes (%s/%s) @@ -15,11 +14,10 @@ # W0142: Used * or ** magic # W0221: Arguments number differs from %s method # W0232: Class has no __init__ method -# W0401: Wildcard import %s # W0613: Unused argument %r # W0631: Using possibly undefined loop variable %r # -disable = C0103,C0111,E1101,R0901,R0902,R0903,R0904,R0913,R0915,W0141,W0142,W0221,W0232,W0401,W0613,W0631 +disable = C0103,E1101,R0901,R0902,R0903,R0904,R0913,R0915,W0141,W0142,W0221,W0232,W0613,W0631 [TYPECHECK] generated-members = REQUEST,acl_users,aq_parent,objects,DoesNotExist,_meta,status_code,content,context diff --git a/pymode/pylama/lint/pylama_pylint/pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py similarity 
index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/__pkginfo__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__init__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/as_string.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/as_string.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/as_string.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/as_string.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/bases.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/bases.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/bases.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/bases.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/builder.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/builder.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/builder.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/builder.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/exceptions.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/exceptions.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/exceptions.py rename to 
pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/exceptions.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/inference.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/inference.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/inference.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/inference.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/manager.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/manager.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/manager.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/manager.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/mixins.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/mixins.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/mixins.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/mixins.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/node_classes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/node_classes.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/node_classes.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/node_classes.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/nodes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/nodes.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/nodes.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/nodes.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/protocols.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/protocols.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/protocols.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/protocols.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/raw_building.py 
b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/raw_building.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/raw_building.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/raw_building.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/rebuilder.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/rebuilder.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/rebuilder.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/rebuilder.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/scoped_nodes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/scoped_nodes.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/scoped_nodes.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/scoped_nodes.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/astroid/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/utils.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/astroid/utils.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/utils.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/base.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/base.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/classes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py similarity index 100% rename from 
pymode/pylama/lint/pylama_pylint/pylint/checkers/classes.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/format.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/format.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/imports.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/imports.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/logging.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/logging.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/misc.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/misc.py rename to 
pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/similar.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/similar.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/strings.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/strings.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py diff --git 
a/pymode/pylama/lint/pylama_pylint/pylint/checkers/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/utils.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/checkers/variables.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/checkers/variables.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/config.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/config.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/config.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/interfaces.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/interfaces.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/lint.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/lint.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/__init__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/__init__.py rename to 
pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/__init__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/__pkginfo__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/__pkginfo__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/__pkginfo__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py similarity index 100% rename from 
pymode/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py similarity 
index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py rename to 
pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/reporters/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/reporters/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/reporters/html.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/reporters/html.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/reporters/text.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/reporters/text.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py diff --git a/pymode/pylama/lint/pylama_pylint/pylint/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py similarity index 100% rename from pymode/pylama/lint/pylama_pylint/pylint/utils.py rename to pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py diff --git a/pymode/pylama/main.py b/pymode/libs/pylama/main.py 
similarity index 88% rename from pymode/pylama/main.py rename to pymode/libs/pylama/main.py index fff21aa8..591d6df2 100644 --- a/pymode/pylama/main.py +++ b/pymode/libs/pylama/main.py @@ -37,10 +37,7 @@ def shell(args=None, error=True): if op.isdir(options.path): paths = [] for root, _, files in walk(options.path): - paths += [ - op.relpath(op.join(root, f), CURDIR) - for f in files - if any(l.allow(f) for _, l in options.linters)] + paths += [op.relpath(op.join(root, f), CURDIR) for f in files] return check_files(paths, options, error=error) @@ -63,6 +60,13 @@ def check_files(paths, options, rootpath=None, error=True): work_paths = [] for path in paths: + + if not any(l.allow(path) for _, l in options.linters): + continue + + if not op.exists(path): + continue + if options.skip and any(p.match(path) for p in options.skip): LOGGER.info('Skip path: %s', path) continue diff --git a/pymode/pylama/tasks.py b/pymode/libs/pylama/tasks.py similarity index 95% rename from pymode/pylama/tasks.py rename to pymode/libs/pylama/tasks.py index 44ad290a..be9a34b2 100644 --- a/pymode/pylama/tasks.py +++ b/pymode/libs/pylama/tasks.py @@ -79,7 +79,7 @@ def async_check_files(paths, options, rootpath=None): return errors -def check_path(path, options=None, rootpath=None, **meta): +def check_path(path, options=None, rootpath=None, code=None, **meta): """ Check path. 
:return list: list of errors @@ -99,7 +99,7 @@ def check_path(path, options=None, rootpath=None, **meta): for error in run( path, ignore=options.ignore, select=options.select, linters=options.linters, complexity=options.complexity, - config=config, **meta): + config=config, code=code, **meta): try: error['rel'] = op.relpath(error['filename'], rootpath) error['col'] = error.get('col', 1) diff --git a/pymode/pylama/utils.py b/pymode/libs/pylama/utils.py similarity index 100% rename from pymode/pylama/utils.py rename to pymode/libs/pylama/utils.py diff --git a/pymode/libs/rope/__init__.py b/pymode/libs2/rope/__init__.py similarity index 100% rename from pymode/libs/rope/__init__.py rename to pymode/libs2/rope/__init__.py diff --git a/pymode/libs/rope/base/__init__.py b/pymode/libs2/rope/base/__init__.py similarity index 100% rename from pymode/libs/rope/base/__init__.py rename to pymode/libs2/rope/base/__init__.py diff --git a/pymode/libs/rope/base/arguments.py b/pymode/libs2/rope/base/arguments.py similarity index 100% rename from pymode/libs/rope/base/arguments.py rename to pymode/libs2/rope/base/arguments.py diff --git a/pymode/libs/rope/base/ast.py b/pymode/libs2/rope/base/ast.py similarity index 100% rename from pymode/libs/rope/base/ast.py rename to pymode/libs2/rope/base/ast.py diff --git a/pymode/libs/rope/base/astutils.py b/pymode/libs2/rope/base/astutils.py similarity index 100% rename from pymode/libs/rope/base/astutils.py rename to pymode/libs2/rope/base/astutils.py diff --git a/pymode/libs/rope/base/builtins.py b/pymode/libs2/rope/base/builtins.py similarity index 100% rename from pymode/libs/rope/base/builtins.py rename to pymode/libs2/rope/base/builtins.py diff --git a/pymode/libs/rope/base/change.py b/pymode/libs2/rope/base/change.py similarity index 100% rename from pymode/libs/rope/base/change.py rename to pymode/libs2/rope/base/change.py diff --git a/pymode/libs/rope/base/codeanalyze.py b/pymode/libs2/rope/base/codeanalyze.py similarity index 100% 
rename from pymode/libs/rope/base/codeanalyze.py rename to pymode/libs2/rope/base/codeanalyze.py diff --git a/pymode/libs/rope/base/default_config.py b/pymode/libs2/rope/base/default_config.py similarity index 100% rename from pymode/libs/rope/base/default_config.py rename to pymode/libs2/rope/base/default_config.py diff --git a/pymode/libs/rope/base/evaluate.py b/pymode/libs2/rope/base/evaluate.py similarity index 100% rename from pymode/libs/rope/base/evaluate.py rename to pymode/libs2/rope/base/evaluate.py diff --git a/pymode/libs/rope/base/exceptions.py b/pymode/libs2/rope/base/exceptions.py similarity index 100% rename from pymode/libs/rope/base/exceptions.py rename to pymode/libs2/rope/base/exceptions.py diff --git a/pymode/libs/rope/base/fscommands.py b/pymode/libs2/rope/base/fscommands.py similarity index 100% rename from pymode/libs/rope/base/fscommands.py rename to pymode/libs2/rope/base/fscommands.py diff --git a/pymode/libs/rope/base/history.py b/pymode/libs2/rope/base/history.py similarity index 100% rename from pymode/libs/rope/base/history.py rename to pymode/libs2/rope/base/history.py diff --git a/pymode/libs/rope/base/libutils.py b/pymode/libs2/rope/base/libutils.py similarity index 100% rename from pymode/libs/rope/base/libutils.py rename to pymode/libs2/rope/base/libutils.py diff --git a/pymode/libs/rope/base/oi/__init__.py b/pymode/libs2/rope/base/oi/__init__.py similarity index 100% rename from pymode/libs/rope/base/oi/__init__.py rename to pymode/libs2/rope/base/oi/__init__.py diff --git a/pymode/libs/rope/base/oi/doa.py b/pymode/libs2/rope/base/oi/doa.py similarity index 100% rename from pymode/libs/rope/base/oi/doa.py rename to pymode/libs2/rope/base/oi/doa.py diff --git a/pymode/libs/rope/base/oi/memorydb.py b/pymode/libs2/rope/base/oi/memorydb.py similarity index 100% rename from pymode/libs/rope/base/oi/memorydb.py rename to pymode/libs2/rope/base/oi/memorydb.py diff --git a/pymode/libs/rope/base/oi/objectdb.py 
b/pymode/libs2/rope/base/oi/objectdb.py similarity index 100% rename from pymode/libs/rope/base/oi/objectdb.py rename to pymode/libs2/rope/base/oi/objectdb.py diff --git a/pymode/libs/rope/base/oi/objectinfo.py b/pymode/libs2/rope/base/oi/objectinfo.py similarity index 100% rename from pymode/libs/rope/base/oi/objectinfo.py rename to pymode/libs2/rope/base/oi/objectinfo.py diff --git a/pymode/libs/rope/base/oi/runmod.py b/pymode/libs2/rope/base/oi/runmod.py similarity index 100% rename from pymode/libs/rope/base/oi/runmod.py rename to pymode/libs2/rope/base/oi/runmod.py diff --git a/pymode/libs/rope/base/oi/soa.py b/pymode/libs2/rope/base/oi/soa.py similarity index 100% rename from pymode/libs/rope/base/oi/soa.py rename to pymode/libs2/rope/base/oi/soa.py diff --git a/pymode/libs/rope/base/oi/soi.py b/pymode/libs2/rope/base/oi/soi.py similarity index 100% rename from pymode/libs/rope/base/oi/soi.py rename to pymode/libs2/rope/base/oi/soi.py diff --git a/pymode/libs/rope/base/oi/transform.py b/pymode/libs2/rope/base/oi/transform.py similarity index 100% rename from pymode/libs/rope/base/oi/transform.py rename to pymode/libs2/rope/base/oi/transform.py diff --git a/pymode/libs/rope/base/prefs.py b/pymode/libs2/rope/base/prefs.py similarity index 100% rename from pymode/libs/rope/base/prefs.py rename to pymode/libs2/rope/base/prefs.py diff --git a/pymode/libs/rope/base/project.py b/pymode/libs2/rope/base/project.py similarity index 100% rename from pymode/libs/rope/base/project.py rename to pymode/libs2/rope/base/project.py diff --git a/pymode/libs/rope/base/pycore.py b/pymode/libs2/rope/base/pycore.py similarity index 100% rename from pymode/libs/rope/base/pycore.py rename to pymode/libs2/rope/base/pycore.py diff --git a/pymode/libs/rope/base/pynames.py b/pymode/libs2/rope/base/pynames.py similarity index 100% rename from pymode/libs/rope/base/pynames.py rename to pymode/libs2/rope/base/pynames.py diff --git a/pymode/libs/rope/base/pynamesdef.py 
b/pymode/libs2/rope/base/pynamesdef.py similarity index 100% rename from pymode/libs/rope/base/pynamesdef.py rename to pymode/libs2/rope/base/pynamesdef.py diff --git a/pymode/libs/rope/base/pyobjects.py b/pymode/libs2/rope/base/pyobjects.py similarity index 100% rename from pymode/libs/rope/base/pyobjects.py rename to pymode/libs2/rope/base/pyobjects.py diff --git a/pymode/libs/rope/base/pyobjectsdef.py b/pymode/libs2/rope/base/pyobjectsdef.py similarity index 100% rename from pymode/libs/rope/base/pyobjectsdef.py rename to pymode/libs2/rope/base/pyobjectsdef.py diff --git a/pymode/libs/rope/base/pyscopes.py b/pymode/libs2/rope/base/pyscopes.py similarity index 100% rename from pymode/libs/rope/base/pyscopes.py rename to pymode/libs2/rope/base/pyscopes.py diff --git a/pymode/libs/rope/base/resourceobserver.py b/pymode/libs2/rope/base/resourceobserver.py similarity index 100% rename from pymode/libs/rope/base/resourceobserver.py rename to pymode/libs2/rope/base/resourceobserver.py diff --git a/pymode/libs/rope/base/resources.py b/pymode/libs2/rope/base/resources.py similarity index 100% rename from pymode/libs/rope/base/resources.py rename to pymode/libs2/rope/base/resources.py diff --git a/pymode/libs/rope/base/simplify.py b/pymode/libs2/rope/base/simplify.py similarity index 100% rename from pymode/libs/rope/base/simplify.py rename to pymode/libs2/rope/base/simplify.py diff --git a/pymode/libs/rope/base/stdmods.py b/pymode/libs2/rope/base/stdmods.py similarity index 100% rename from pymode/libs/rope/base/stdmods.py rename to pymode/libs2/rope/base/stdmods.py diff --git a/pymode/libs/rope/base/taskhandle.py b/pymode/libs2/rope/base/taskhandle.py similarity index 100% rename from pymode/libs/rope/base/taskhandle.py rename to pymode/libs2/rope/base/taskhandle.py diff --git a/pymode/libs/rope/base/utils.py b/pymode/libs2/rope/base/utils.py similarity index 100% rename from pymode/libs/rope/base/utils.py rename to pymode/libs2/rope/base/utils.py diff --git 
a/pymode/libs/rope/base/worder.py b/pymode/libs2/rope/base/worder.py similarity index 100% rename from pymode/libs/rope/base/worder.py rename to pymode/libs2/rope/base/worder.py diff --git a/pymode/libs/rope/contrib/__init__.py b/pymode/libs2/rope/contrib/__init__.py similarity index 100% rename from pymode/libs/rope/contrib/__init__.py rename to pymode/libs2/rope/contrib/__init__.py diff --git a/pymode/libs/rope/contrib/autoimport.py b/pymode/libs2/rope/contrib/autoimport.py similarity index 100% rename from pymode/libs/rope/contrib/autoimport.py rename to pymode/libs2/rope/contrib/autoimport.py diff --git a/pymode/libs/rope/contrib/changestack.py b/pymode/libs2/rope/contrib/changestack.py similarity index 100% rename from pymode/libs/rope/contrib/changestack.py rename to pymode/libs2/rope/contrib/changestack.py diff --git a/pymode/libs/rope/contrib/codeassist.py b/pymode/libs2/rope/contrib/codeassist.py similarity index 100% rename from pymode/libs/rope/contrib/codeassist.py rename to pymode/libs2/rope/contrib/codeassist.py diff --git a/pymode/libs/rope/contrib/finderrors.py b/pymode/libs2/rope/contrib/finderrors.py similarity index 100% rename from pymode/libs/rope/contrib/finderrors.py rename to pymode/libs2/rope/contrib/finderrors.py diff --git a/pymode/libs/rope/contrib/findit.py b/pymode/libs2/rope/contrib/findit.py similarity index 100% rename from pymode/libs/rope/contrib/findit.py rename to pymode/libs2/rope/contrib/findit.py diff --git a/pymode/libs/rope/contrib/fixmodnames.py b/pymode/libs2/rope/contrib/fixmodnames.py similarity index 100% rename from pymode/libs/rope/contrib/fixmodnames.py rename to pymode/libs2/rope/contrib/fixmodnames.py diff --git a/pymode/libs/rope/contrib/fixsyntax.py b/pymode/libs2/rope/contrib/fixsyntax.py similarity index 100% rename from pymode/libs/rope/contrib/fixsyntax.py rename to pymode/libs2/rope/contrib/fixsyntax.py diff --git a/pymode/libs/rope/contrib/generate.py b/pymode/libs2/rope/contrib/generate.py similarity 
index 100% rename from pymode/libs/rope/contrib/generate.py rename to pymode/libs2/rope/contrib/generate.py diff --git a/pymode/libs/rope/refactor/__init__.py b/pymode/libs2/rope/refactor/__init__.py similarity index 100% rename from pymode/libs/rope/refactor/__init__.py rename to pymode/libs2/rope/refactor/__init__.py diff --git a/pymode/libs/rope/refactor/change_signature.py b/pymode/libs2/rope/refactor/change_signature.py similarity index 100% rename from pymode/libs/rope/refactor/change_signature.py rename to pymode/libs2/rope/refactor/change_signature.py diff --git a/pymode/libs/rope/refactor/encapsulate_field.py b/pymode/libs2/rope/refactor/encapsulate_field.py similarity index 100% rename from pymode/libs/rope/refactor/encapsulate_field.py rename to pymode/libs2/rope/refactor/encapsulate_field.py diff --git a/pymode/libs/rope/refactor/extract.py b/pymode/libs2/rope/refactor/extract.py similarity index 100% rename from pymode/libs/rope/refactor/extract.py rename to pymode/libs2/rope/refactor/extract.py diff --git a/pymode/libs/rope/refactor/functionutils.py b/pymode/libs2/rope/refactor/functionutils.py similarity index 100% rename from pymode/libs/rope/refactor/functionutils.py rename to pymode/libs2/rope/refactor/functionutils.py diff --git a/pymode/libs/rope/refactor/importutils/__init__.py b/pymode/libs2/rope/refactor/importutils/__init__.py similarity index 100% rename from pymode/libs/rope/refactor/importutils/__init__.py rename to pymode/libs2/rope/refactor/importutils/__init__.py diff --git a/pymode/libs/rope/refactor/importutils/actions.py b/pymode/libs2/rope/refactor/importutils/actions.py similarity index 100% rename from pymode/libs/rope/refactor/importutils/actions.py rename to pymode/libs2/rope/refactor/importutils/actions.py diff --git a/pymode/libs/rope/refactor/importutils/importinfo.py b/pymode/libs2/rope/refactor/importutils/importinfo.py similarity index 100% rename from pymode/libs/rope/refactor/importutils/importinfo.py rename to 
pymode/libs2/rope/refactor/importutils/importinfo.py diff --git a/pymode/libs/rope/refactor/importutils/module_imports.py b/pymode/libs2/rope/refactor/importutils/module_imports.py similarity index 100% rename from pymode/libs/rope/refactor/importutils/module_imports.py rename to pymode/libs2/rope/refactor/importutils/module_imports.py diff --git a/pymode/libs/rope/refactor/inline.py b/pymode/libs2/rope/refactor/inline.py similarity index 100% rename from pymode/libs/rope/refactor/inline.py rename to pymode/libs2/rope/refactor/inline.py diff --git a/pymode/libs/rope/refactor/introduce_factory.py b/pymode/libs2/rope/refactor/introduce_factory.py similarity index 100% rename from pymode/libs/rope/refactor/introduce_factory.py rename to pymode/libs2/rope/refactor/introduce_factory.py diff --git a/pymode/libs/rope/refactor/introduce_parameter.py b/pymode/libs2/rope/refactor/introduce_parameter.py similarity index 100% rename from pymode/libs/rope/refactor/introduce_parameter.py rename to pymode/libs2/rope/refactor/introduce_parameter.py diff --git a/pymode/libs/rope/refactor/localtofield.py b/pymode/libs2/rope/refactor/localtofield.py similarity index 100% rename from pymode/libs/rope/refactor/localtofield.py rename to pymode/libs2/rope/refactor/localtofield.py diff --git a/pymode/libs/rope/refactor/method_object.py b/pymode/libs2/rope/refactor/method_object.py similarity index 100% rename from pymode/libs/rope/refactor/method_object.py rename to pymode/libs2/rope/refactor/method_object.py diff --git a/pymode/libs/rope/refactor/move.py b/pymode/libs2/rope/refactor/move.py similarity index 100% rename from pymode/libs/rope/refactor/move.py rename to pymode/libs2/rope/refactor/move.py diff --git a/pymode/libs/rope/refactor/multiproject.py b/pymode/libs2/rope/refactor/multiproject.py similarity index 100% rename from pymode/libs/rope/refactor/multiproject.py rename to pymode/libs2/rope/refactor/multiproject.py diff --git a/pymode/libs/rope/refactor/occurrences.py 
b/pymode/libs2/rope/refactor/occurrences.py similarity index 100% rename from pymode/libs/rope/refactor/occurrences.py rename to pymode/libs2/rope/refactor/occurrences.py diff --git a/pymode/libs/rope/refactor/patchedast.py b/pymode/libs2/rope/refactor/patchedast.py similarity index 100% rename from pymode/libs/rope/refactor/patchedast.py rename to pymode/libs2/rope/refactor/patchedast.py diff --git a/pymode/libs/rope/refactor/rename.py b/pymode/libs2/rope/refactor/rename.py similarity index 100% rename from pymode/libs/rope/refactor/rename.py rename to pymode/libs2/rope/refactor/rename.py diff --git a/pymode/libs/rope/refactor/restructure.py b/pymode/libs2/rope/refactor/restructure.py similarity index 100% rename from pymode/libs/rope/refactor/restructure.py rename to pymode/libs2/rope/refactor/restructure.py diff --git a/pymode/libs/rope/refactor/similarfinder.py b/pymode/libs2/rope/refactor/similarfinder.py similarity index 100% rename from pymode/libs/rope/refactor/similarfinder.py rename to pymode/libs2/rope/refactor/similarfinder.py diff --git a/pymode/libs/rope/refactor/sourceutils.py b/pymode/libs2/rope/refactor/sourceutils.py similarity index 100% rename from pymode/libs/rope/refactor/sourceutils.py rename to pymode/libs2/rope/refactor/sourceutils.py diff --git a/pymode/libs/rope/refactor/suites.py b/pymode/libs2/rope/refactor/suites.py similarity index 100% rename from pymode/libs/rope/refactor/suites.py rename to pymode/libs2/rope/refactor/suites.py diff --git a/pymode/libs/rope/refactor/topackage.py b/pymode/libs2/rope/refactor/topackage.py similarity index 100% rename from pymode/libs/rope/refactor/topackage.py rename to pymode/libs2/rope/refactor/topackage.py diff --git a/pymode/libs/rope/refactor/usefunction.py b/pymode/libs2/rope/refactor/usefunction.py similarity index 100% rename from pymode/libs/rope/refactor/usefunction.py rename to pymode/libs2/rope/refactor/usefunction.py diff --git a/pymode/libs/rope/refactor/wildcards.py 
b/pymode/libs2/rope/refactor/wildcards.py similarity index 100% rename from pymode/libs/rope/refactor/wildcards.py rename to pymode/libs2/rope/refactor/wildcards.py diff --git a/pymode/lint.py b/pymode/lint.py index 28dc9ab1..f3958c40 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -9,8 +9,8 @@ def code_check(): """ Run pylama and check current file. """ - from .pylama.main import parse_options - from .pylama.tasks import check_path + from pylama.main import parse_options + from pylama.tasks import check_path import json b = vim.current.buffer @@ -31,8 +31,10 @@ def code_check(): vim.command('return') return False + code = '\n'.join(vim.current.buffer) + with silence_stderr(): - errors = check_path(path, options=options) + errors = check_path(path, options=options, code=code) sort_rules = vim.eval('g:pymode_lint_sort') def sort(e): diff --git a/pymode/pylama/lint/__init__.py b/pymode/pylama/lint/__init__.py deleted file mode 100644 index 665fc22b..00000000 --- a/pymode/pylama/lint/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -""" Custom module loader. """ - - -class Linter(object): # noqa - - """ Abstract class for linter plugin. """ - - @staticmethod - def allow(path): - """ Check path is relevant for linter. - - :return bool: - - """ - - return path.endswith('.py') - - @staticmethod - def run(path, **meta): - """ Method 'run' should be defined. 
""" - - raise NotImplementedError(__doc__) - - -LINTERS = dict() - -from .pylama_mccabe import Linter as MccabeLinter -from .pylama_pep8 import Linter as Pep8Linter -from .pylama_pep257 import Linter as Pep257Linter -from .pylama_pyflakes import Linter as PyflakesLinter -from .pylama_pylint import Linter as PylintLinter - -LINTERS['mccabe'] = MccabeLinter() -LINTERS['pep8'] = Pep8Linter() -LINTERS['pep257'] = Pep257Linter() -LINTERS['pyflakes'] = PyflakesLinter() -LINTERS['pylint'] = PylintLinter() diff --git a/pymode/rope.py b/pymode/rope.py index 7744a025..d949cd59 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -1,28 +1,23 @@ """ Rope support in pymode. """ - from __future__ import absolute_import, print_function -import vim # noqa -import site -import os.path -import sys -import re import json import multiprocessing -from .utils import ( - pymode_message, PY2, pymode_error, pymode_input, pymode_inputlist, - pymode_confirm, catch_and_print_exceptions) - -if PY2: - sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs')) -else: - sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs3')) +import os.path +import re +import site +import sys from rope.base import project, libutils, exceptions, change, worder # noqa from rope.base.fscommands import FileSystemCommands # noqa +from rope.base.taskhandle import TaskHandle # noqa from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa from rope.refactor import ModuleToPackage, ImportOrganizer, rename, extract, inline, usefunction, move, change_signature, importutils # noqa -from rope.base.taskhandle import TaskHandle # noqa + +import vim # noqa +from .utils import ( + pymode_message, pymode_error, pymode_input, pymode_inputlist, + pymode_confirm, catch_and_print_exceptions) def get_assist_params(cursor=None, base=''): diff --git a/pymode/utils.py b/pymode/utils.py index 964b9bfe..728387ff 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -1,17 +1,18 @@ """ 
Pymode utils. """ -import sys - import json +import os.path +import sys +import threading +from contextlib import contextmanager import vim # noqa + try: from StringIO import StringIO except ImportError: from io import StringIO -from contextlib import contextmanager -import threading PY2 = sys.version_info[0] == 2 @@ -146,3 +147,14 @@ def silence_stderr(): with threading.Lock(): sys.stderr = stderr + + +def patch_paths(): + """ Function description. """ + + sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs')) + + if PY2: + sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs2')) + else: + sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs3')) From b5f1ef0be4b145a88bceaf0ebcafec95f5c09c3f Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 02:21:58 +0700 Subject: [PATCH 003/428] Dont save code before checking --- autoload/pymode/lint.vim | 4 ---- ftplugin/python/pymode.vim | 2 ++ 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/autoload/pymode/lint.vim b/autoload/pymode/lint.vim index ba5a7ccc..8d0cd8a8 100644 --- a/autoload/pymode/lint.vim +++ b/autoload/pymode/lint.vim @@ -46,10 +46,6 @@ fun! 
pymode#lint#check() "{{{ let b:pymode_errors = {} - if !pymode#save() - return 0 - endif - call pymode#wide_message('Code checking is running ...') PymodePython code_check() diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index e8488149..a64787c5 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -7,6 +7,8 @@ if !pymode#default('g:pymode_init', 1) call pymode#init(expand(':p:h:h:h'), g:pymode_paths) call pymode#virtualenv#init() call pymode#breakpoint#init() + PymodePython from pymode.utils import patch_paths + PymodePython patch_paths() endif augroup pymode From df0d73d06f20c72c5e3d7dd5bf7401559adbed96 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 02:22:41 +0700 Subject: [PATCH 004/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index bf3e62ea..cd9e49cc 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-01 0.7.0b +## 2013-12-01 0.7.1b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index 946f4170..7ed8f49a 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.0b + Version: 0.7.1b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index f3ded350..f4c182ea 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.0b" +let g:pymode_version = "0.7.1b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! 
PymodeTroubleshooting call pymode#troubleshooting#test() From 892aa7df075e798f7edd763b819c369aea052aaa Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 03:17:00 +0700 Subject: [PATCH 005/428] Fix documentation --- doc/pymode.txt | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 7ed8f49a..fc47986f 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -334,17 +334,20 @@ Definitions for |signs| Pymode support Rope refactoring operations, code completion and code assists. Commands: -*:PymodeRopeAutoImport* -- Autoimport used modules -*:PymodeRopeModuleToPackage* -- Convert current module to package -*:PymodeRopeNewProject* -- Open new Rope project in current working directory -*:PymodeRopeRedo* -- Redo changes from last refactoring -*:PymodeRopeRegenerate* -- Regenerate the project cache -*:PymodeRopeRenameModule* -- Rename current module -*:PymodeRopeUndo* -- Undo changes from last refactoring +|:PymodeRopeAutoImport| -- Resolve import for element under cursor +|:PymodeRopeModuleToPackage| -- Convert current module to package +|:PymodeRopeNewProject| -- Open new Rope project in current working directory +|:PymodeRopeRedo| -- Redo changes from last refactoring +|:PymodeRopeRegenerate| -- Regenerate the project cache +|:PymodeRopeRenameModule| -- Rename current module +|:PymodeRopeUndo| -- Undo changes from last refactoring .roperoject Folder ~ *.ropeproject* +*:PymodeRopeNewProject* -- Open new Rope project in current working directory +*:PymodeRopeRegenerate* -- Regenerate the project cache + Rope uses a folder inside projects for holding project configuration and data. Its default name is `.ropeproject` If the folder doesnt exists in current working directory, parent folders will be checked. 
@@ -452,6 +455,8 @@ Keymap for rename current module *'g:pymode_rope_rename_module_bind'* Imports ~ +*:PymodeRopeAutoImport* -- Resolve import for element under cursor + Organize imports sorts imports, too. It does that according to PEP8. Unused imports will be dropped. Keymap *'g:pymode_rope_organize_imports_bind'* From b65f6691413f0ec16bc404bfeb5436a5ff401c8c Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 15:47:11 +0700 Subject: [PATCH 006/428] Fix Rope --- Makefile | 1 + autoload/pymode/lint.vim | 47 ++++++++++++----------- autoload/pymode/tools/loclist.vim | 62 +++++++++++++++++++++++++++++++ autoload/pymode/tools/signs.vim | 57 ++++++++++++++++++++++++++++ plugin/pymode.vim | 12 +----- pymode/lint.py | 15 +++++--- pymode/rope.py | 47 ++++++++++++++++++----- pymode/utils.py | 14 ++++++- t/rope.vim | 31 ++++++++++++++++ t/trouble.vim | 1 - 10 files changed, 233 insertions(+), 54 deletions(-) create mode 100644 autoload/pymode/tools/loclist.vim create mode 100644 autoload/pymode/tools/signs.vim create mode 100644 t/rope.vim diff --git a/Makefile b/Makefile index 4b6e3924..42016f43 100644 --- a/Makefile +++ b/Makefile @@ -9,6 +9,7 @@ clean: .PHONY: test test: bundle install + rm -rf $(CURDIR)/.ropeproject rake test .PHONY: pylama diff --git a/autoload/pymode/lint.vim b/autoload/pymode/lint.vim index 8d0cd8a8..6048a7ef 100644 --- a/autoload/pymode/lint.vim +++ b/autoload/pymode/lint.vim @@ -1,5 +1,9 @@ PymodePython from pymode.lint import code_check +call pymode#tools#signs#init() +call pymode#tools#loclist#init() + + fun! pymode#lint#auto() "{{{ if !pymode#save() return 0 @@ -13,7 +17,8 @@ endfunction "}}} fun! pymode#lint#show_errormessage() "{{{ - if empty(b:pymode_errors) + let loclist = g:PymodeLocList.current() + if loclist.is_empty() return endif @@ -22,8 +27,8 @@ fun! 
pymode#lint#show_errormessage() "{{{ return endif let b:pymode_error_line = l - if has_key(b:pymode_errors, l) - call pymode#wide_message(b:pymode_errors[l]) + if has_key(loclist._messages, l) + call pymode#wide_message(loclist._messages[l]) else echo endif @@ -39,47 +44,39 @@ fun! pymode#lint#toggle() "{{{ end endfunction "}}} + fun! pymode#lint#check() "{{{ " DESC: Run checkers on current file. " if !g:pymode_lint | return | endif - let b:pymode_errors = {} + let loclist = g:PymodeLocList.current() + + let b:pymode_error_line = -1 + + call loclist.clear() call pymode#wide_message('Code checking is running ...') PymodePython code_check() - let errors = getqflist() - if empty(errors) + if loclist.is_empty() call pymode#wide_message('Code checking is completed. No errors found.') endif if g:pymode_lint_cwindow + call setqflist(loclist._loclist) call pymode#quickfix_open(0, g:pymode_quickfix_maxheight, g:pymode_quickfix_minheight, 0) endif - if g:pymode_lint_signs - for item in b:pymode_signs - execute printf('sign unplace %d buffer=%d', item.lnum, item.bufnr) - endfor - let b:pymode_lint_signs = [] - for item in filter(errors, 'v:val.bufnr != ""') - call add(b:pymode_signs, item) - execute printf('sign place %d line=%d name=%s buffer=%d', item.lnum, item.lnum, "Pymode".item.type, item.bufnr) - endfor - endif - - for item in errors - let b:pymode_errors[item.lnum] = item.text - endfor + call g:PymodeSigns.refresh(loclist) - let b:pymode_error_line = -1 call pymode#lint#show_errormessage() - call pymode#wide_message('Found errors and warnings: ' . len(errors)) + call pymode#wide_message('Found errors and warnings: ' . len(loclist._loclist)) endfunction " }}} + fun! pymode#lint#tick_queue() "{{{ python import time @@ -96,11 +93,13 @@ fun! pymode#lint#tick_queue() "{{{ endif endfunction "}}} + fun! pymode#lint#stop() "{{{ au! pymode CursorHold -endfunction +endfunction "}}} + fun! pymode#lint#start() "{{{ au! 
pymode CursorHold call pymode#lint#tick_queue() call pymode#lint#tick_queue() -endfunction +endfunction "}}} diff --git a/autoload/pymode/tools/loclist.vim b/autoload/pymode/tools/loclist.vim new file mode 100644 index 00000000..ca35abab --- /dev/null +++ b/autoload/pymode/tools/loclist.vim @@ -0,0 +1,62 @@ +let g:PymodeLocList= {} + + +fun! pymode#tools#loclist#init() "{{{ + return +endfunction "}}} + + +fun! g:PymodeLocList.init(raw_list) "{{{ + let obj = copy(self) + let loc_list = filter(copy(a:raw_list), 'v:val["valid"] == 1') + call obj.clear() + return obj +endfunction "}}} + + +fun! g:PymodeLocList.current() "{{{ + if !exists("b:pymode_loclist") + let b:pymode_loclist = g:PymodeLocList.init([]) + endif + return b:pymode_loclist +endfunction "}}} + + +fun! g:PymodeLocList.is_empty() "{{{ + return empty(self._loclist) +endfunction "}}} + + +fun! g:PymodeLocList.clear() "{{{ + let self._loclist = [] + let self._messages = {} +endfunction "}}} + + +fun! g:PymodeLocList.extend(raw_list) "{{{ + call extend(self._loclist, a:raw_list) + for issue in a:raw_list + let self._messages[issue.lnum] = issue.text + endfor + return self +endfunction "}}} + + +fun! g:PymodeLocList.filter(filters) "{{{ + let loclist = [] + for error in self._loclist + let passes_filters = 1 + for key in keys(a:filters) + if get(error, key, '') !=? a:filters[key] + let passes_filters = 0 + break + endif + endfor + + if passes_filters + call add(loclist, error) + endif + + endfor + return loclist +endfunction "}}} diff --git a/autoload/pymode/tools/signs.vim b/autoload/pymode/tools/signs.vim new file mode 100644 index 00000000..dbac8c39 --- /dev/null +++ b/autoload/pymode/tools/signs.vim @@ -0,0 +1,57 @@ +let g:PymodeSigns = {} + + +fun! pymode#tools#signs#init() "{{{ + call g:PymodeSigns.setup() +endfunction "}}} + + +fun! g:PymodeSigns.enabled() "{{{ + return (g:pymode_lint_signs && has('signs')) +endfunction "}}} + + +fun! 
g:PymodeSigns.setup() "{{{ + if self.enabled() + execute 'sign define PymodeW text=' . g:pymode_lint_todo_symbol . " texthl=Todo" + execute 'sign define PymodeC text=' . g:pymode_lint_comment_symbol . " texthl=Comment" + execute 'sign define PymodeR text=' . g:pymode_lint_visual_symbol . " texthl=Visual" + execute 'sign define PymodeE text=' . g:pymode_lint_error_symbol . " texthl=Error" + execute 'sign define PymodeI text=' . g:pymode_lint_info_symbol . " texthl=Info" + execute 'sign define PymodeF text=' . g:pymode_lint_pyflakes_symbol . " texthl=Info" + endif + let self._sign_ids = [] + let self._next_id = 10000 + let self._messages = {} +endfunction "}}} + + +fun! g:PymodeSigns.refresh(loclist) "{{{ + if self.enabled() + call self.clear() + call self.place(a:loclist) + endif +endfunction "}}} + + +fun! g:PymodeSigns.clear() "{{{ + let ids = copy(self._sign_ids) + for i in ids + execute "sign unplace " . i + call remove(self._sign_ids, index(self._sign_ids, i)) + endfor +endfunction "}}} + + +fun! g:PymodeSigns.place(loclist) "{{{ + let seen = {} + let buf = bufnr('') + for issue in a:loclist._loclist + if !has_key(seen, issue.lnum) + let seen[issue.lnum] = 1 + call add(self._sign_ids, self._next_id) + execute printf('sign place %d line=%d name=%s buffer=%d', self._next_id, issue.lnum, "Pymode".issue.type[0], buf) + let self._next_id += 1 + endif + endfor +endfunction "}}} diff --git a/plugin/pymode.vim b/plugin/pymode.vim index f4c182ea..78e02d39 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -6,6 +6,7 @@ com! 
PymodeTroubleshooting call pymode#troubleshooting#test() " Enable pymode by default :) call pymode#default('g:pymode', 1) +call pymode#default('g:pymode_debug', 0) " DESC: Disable script loading if !g:pymode || &cp @@ -119,17 +120,6 @@ call pymode#default("g:pymode_lint_error_symbol", "EE") call pymode#default("g:pymode_lint_info_symbol", "II") call pymode#default("g:pymode_lint_pyflakes_symbol", "FF") -if g:pymode_lint_signs && has('signs') - - execute 'sign define PymodeW text=' . g:pymode_lint_todo_symbol . " texthl=Todo" - execute 'sign define PymodeC text=' . g:pymode_lint_comment_symbol . " texthl=Comment" - execute 'sign define PymodeR text=' . g:pymode_lint_visual_symbol . " texthl=Visual" - execute 'sign define PymodeE text=' . g:pymode_lint_error_symbol . " texthl=Error" - execute 'sign define PymodeI text=' . g:pymode_lint_info_symbol . " texthl=Info" - execute 'sign define PymodeF text=' . g:pymode_lint_pyflakes_symbol . " texthl=Info" - -endif - " }}} " SET/UNSET BREAKPOINTS {{{ diff --git a/pymode/lint.py b/pymode/lint.py index f3958c40..a42f8b6b 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -7,7 +7,11 @@ def code_check(): - """ Run pylama and check current file. """ + """ Run pylama and check current file. 
+ + :return bool: + + """ from pylama.main import parse_options from pylama.tasks import check_path @@ -37,15 +41,14 @@ def code_check(): errors = check_path(path, options=options, code=code) sort_rules = vim.eval('g:pymode_lint_sort') - def sort(e): + def __sort(e): try: - print(e.get('type')) return sort_rules.index(e.get('type')) except ValueError: return 999 if sort_rules: - print(sort_rules) - errors = sorted(errors, key=sort) + errors = sorted(errors, key=__sort) - vim.command('call setqflist(%s)' % json.dumps(errors)) + vim.command( + 'call g:PymodeLocList.current().extend(%s)' % json.dumps(errors)) diff --git a/pymode/rope.py b/pymode/rope.py index d949cd59..5ff8d0ed 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -17,7 +17,7 @@ import vim # noqa from .utils import ( pymode_message, pymode_error, pymode_input, pymode_inputlist, - pymode_confirm, catch_and_print_exceptions) + pymode_confirm, catch_and_print_exceptions, debug) def get_assist_params(cursor=None, base=''): @@ -63,6 +63,7 @@ def look_ropeproject(path): p = new_p +@catch_and_print_exceptions def completions(): """ Search completions. """ @@ -158,14 +159,17 @@ def goto(): found_resource, line = codeassist.get_definition_location( ctx.project, source, offset, ctx.resource, maxfixes=3) + if not found_resource: + pymode_error('Definition not found') + return + if not os.path.abspath(found_resource.path) == vim.current.buffer.name: vim.command("%s +%s %s" % ( ctx.options.get('goto_definition_cmd'), line, found_resource.path)) else: - vim.current.window.cursor = ( - line, int(vim.eval('indent(%s)' % line))) + vim.command('normal %sggzz' % line) @catch_and_print_exceptions @@ -330,7 +334,6 @@ def autoimport(): pymode_message('Global name %s not found.' 
% word) return False - source, _ = get_assist_params() if len(modules) == 1: _insert_import(word, modules[0], ctx) @@ -349,6 +352,8 @@ class RopeContext(object): def __init__(self, path, project_path): self.path = path + debug('Init rope context %s' % self.path) + self.project = project.Project( project_path, fscommands=FileSystemCommands()) @@ -360,7 +365,7 @@ def __init__(self, path, project_path): self.resource = None self.options = dict( completeopt=vim.eval('&completeopt'), - autoimport=vim.eval('g:pymode_rope_autoimport'), + autoimport=int(vim.eval('g:pymode_rope_autoimport')), autoimport_modules=vim.eval('g:pymode_rope_autoimport_modules'), goto_definition_cmd=vim.eval('g:pymode_rope_goto_definition_cmd'), ) @@ -376,6 +381,13 @@ def __enter__(self): self.options['encoding'] = vim.eval('&encoding') self.resource = libutils.path_to_resource( self.project, vim.current.buffer.name, 'file') + + if not self.resource.exists() or os.path.isdir( + self.resource.real_path): + self.resource = None + else: + debug('Found resource "%s"' % self.resource.path) + return self def __exit__(self, t, value, traceback): @@ -435,6 +447,11 @@ def run(self): """ with RopeContext() as ctx: + + if not ctx.resource: + pymode_error("You should save the file before refactoring.") + return None + try: pymode_message(self.__doc__) refactor = self.get_refactor(ctx) @@ -728,7 +745,11 @@ def get_refactor(ctx): ctx.project, ctx.resource, offset) def get_changes(self, refactor, input_string): - """ Function description. """ + """ Function description. + + :return Rope.changes: + + """ args = re.sub(r'[\s\(\)]+', '', input_string).split(',') olds = [arg[0] for arg in refactor.get_args()] @@ -771,9 +792,12 @@ def get_refactor(self, ctx): self.kind, ctx.project, ctx.resource, offset) def get_changes(self, refactor, input_str): - """ Function description. """ + """ Function description. 
+ + :return Rope.changes: + + """ - print(refactor) return refactor.get_changes() @@ -847,7 +871,11 @@ def _get_autoimport_proposals(out, ctx, source, offset, dot=False): @catch_and_print_exceptions def complete_check(): - """ Function description. """ + """ Function description. + + :return bool: + + """ row, column = vim.current.window.cursor line = vim.current.buffer[row - 1] @@ -868,7 +896,6 @@ def complete_check(): if not pymode_confirm(True, "Import %s?" % name): return False - source, _ = get_assist_params() if len(modules) == 1: _insert_import(name, modules[0], ctx) diff --git a/pymode/utils.py b/pymode/utils.py index 728387ff..101b5b7a 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -14,7 +14,7 @@ from io import StringIO - +DEBUG = int(vim.eval('g:pymode_debug')) PY2 = sys.version_info[0] == 2 @@ -129,6 +129,8 @@ def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except (Exception, vim.error) as e: # noqa + if DEBUG: + raise pymode_error(e) return None return wrapper @@ -136,7 +138,6 @@ def wrapper(*args, **kwargs): @contextmanager def silence_stderr(): - """ Redirect stderr. """ with threading.Lock(): @@ -158,3 +159,12 @@ def patch_paths(): sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs2')) else: sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs3')) + + +debug = lambda _: None + +if DEBUG: + def debug(msg): # noqa + """ Debug message. """ + + print(msg) diff --git a/t/rope.vim b/t/rope.vim new file mode 100644 index 00000000..97f22e1e --- /dev/null +++ b/t/rope.vim @@ -0,0 +1,31 @@ +let g:pymode_rope_completion_bind = 'X' +let g:pymode_rope_autoimport = 0 + +source plugin/pymode.vim + +describe 'pymode-plugin' + + before + set filetype=python + end + + after + bd! + bd! + end + + it 'pymode rope auto open project in current working directory' + let project_path = getcwd() . 
'/.ropeproject' + Expect isdirectory(project_path) == 0 + call pymode#rope#complete(0) + Expect isdirectory(project_path) == 1 + end + + it 'pymode rope completion' + source after/ftplugin/python.vim + Expect &ft == 'python' + normal oimporX + Expect getline('.') == 'import' + end + +end diff --git a/t/trouble.vim b/t/trouble.vim index 56a6d12d..b2dd2c30 100644 --- a/t/trouble.vim +++ b/t/trouble.vim @@ -13,4 +13,3 @@ describe 'pymode troubleshooting' end end - From 0d36f065875604b520e0e2e7215cb21a09f484ec Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 16:24:35 +0700 Subject: [PATCH 007/428] Fix Autoimport on unsaved file --- autoload/pymode.vim | 4 ++-- autoload/pymode/rope.vim | 3 --- pymode/rope.py | 7 +++++++ t/plugin.vim | 5 +++++ t/rope.vim | 5 +---- 5 files changed, 15 insertions(+), 9 deletions(-) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index dbc7377b..4e881886 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -86,11 +86,11 @@ fun! pymode#save() "{{{ try noautocmd write catch /E212/ - call pymode#error("File modified and I can't save it. Cancel code checking.") + call pymode#error("File modified and I can't save it. Please save it manually.") return 0 endtry endif - return 1 + return expand('%') != '' endfunction "}}} fun! pymode#reload_buf_by_nr(nr) "{{{ diff --git a/autoload/pymode/rope.vim b/autoload/pymode/rope.vim index f9d98d9f..737dd71d 100644 --- a/autoload/pymode/rope.vim +++ b/autoload/pymode/rope.vim @@ -161,9 +161,6 @@ fun! pymode#rope#module_to_package() "{{{ endfunction "}}} fun! 
pymode#rope#autoimport(word) "{{{ - if !pymode#save() - return 0 - endif PymodePython rope.autoimport() endfunction "}}} diff --git a/pymode/rope.py b/pymode/rope.py index 5ff8d0ed..a4174e01 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -909,6 +909,13 @@ def complete_check(): def _insert_import(name, module, ctx): + if not ctx.resource: + source, _ = get_assist_params() + lineno = ctx.importer.find_insertion_line(source) + line = 'from %s import %s' % (module, name) + vim.current.buffer[lineno - 1:lineno - 1] = [line] + return True + pyobject = ctx.project.pycore.resource_to_pyobject(ctx.resource) import_tools = importutils.ImportTools(ctx.project.pycore) module_imports = import_tools.module_imports(pyobject) diff --git a/t/plugin.vim b/t/plugin.vim index bfa9cf2d..33951f8f 100644 --- a/t/plugin.vim +++ b/t/plugin.vim @@ -27,6 +27,11 @@ describe 'pymode-plugin' Expect getline('$') == 'test success' end + it 'pymode save' + Expect expand('%') == '' + Expect pymode#save() == 0 + end + end diff --git a/t/rope.vim b/t/rope.vim index 97f22e1e..bc10fbc6 100644 --- a/t/rope.vim +++ b/t/rope.vim @@ -1,6 +1,5 @@ let g:pymode_rope_completion_bind = 'X' let g:pymode_rope_autoimport = 0 - source plugin/pymode.vim describe 'pymode-plugin' @@ -15,15 +14,13 @@ describe 'pymode-plugin' end it 'pymode rope auto open project in current working directory' - let project_path = getcwd() . 
'/.ropeproject' + let project_path = '.ropeproject' Expect isdirectory(project_path) == 0 call pymode#rope#complete(0) Expect isdirectory(project_path) == 1 end it 'pymode rope completion' - source after/ftplugin/python.vim - Expect &ft == 'python' normal oimporX Expect getline('.') == 'import' end From 3f2fd87ee8af972ee3a697f5a31ba6df4d85384f Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 16:33:03 +0700 Subject: [PATCH 008/428] Force lint every save --- autoload/pymode.vim | 2 +- doc/pymode.txt | 4 ++++ plugin/pymode.vim | 5 ++++- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index 4e881886..77834bed 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -112,7 +112,7 @@ fun! pymode#buffer_post_write() "{{{ if b:pymode_modified && g:pymode_rope_regenerate_on_write call pymode#rope#regenerate() endif - if b:pymode_modified && g:pymode_lint_on_write + if g:pymode_lint_on_write && (b:pymode_modified || g:pymode_lint_unmodified) call pymode#lint#check() endif endfunction "}}} diff --git a/doc/pymode.txt b/doc/pymode.txt index fc47986f..06a450e1 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -276,6 +276,10 @@ Check code on every save (if file has been modified) *'g:pymode_lint_on_write'* > let g:pymode_lint_on_write = 1 +Check code on every save (every) *'g:pymode_lint_unmodified'* +> + let g:pymode_lint_unmodified = 1 + Check code when editting (onfly) *'g:pymode_lint_on_fly'* > let g:pymode_lint_on_fly = 1 diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 78e02d39..8d7bcaae 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -83,9 +83,12 @@ call pymode#default('g:pymode_lint', 1) call pymode#default('g:pymode_lint_async', 1) call pymode#default('g:pymode_lint_async_updatetime', 1000) -" Check code every save. 
+" Check code every save if file has been modified call pymode#default("g:pymode_lint_on_write", 1) +" Check code every save (every) +call pymode#default("g:pymode_lint_unmodified", 0) + " Check code on fly call pymode#default("g:pymode_lint_on_fly", 0) From 0f788709201bce49001e961886ebac8ab3aac7ac Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 16:36:32 +0700 Subject: [PATCH 009/428] Try to fix travis --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index b377db57..407991c1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,4 +2,5 @@ language: ruby python: "2.7" rvm: - 1.9.3 -script: rake ci +script: + - make test From 17483befd8475090511579ef614adda21bb87ea8 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 16:40:24 +0700 Subject: [PATCH 010/428] cleanup --- ftplugin/python/pymode.vim | 2 -- 1 file changed, 2 deletions(-) diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index a64787c5..0ec77b4d 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -72,8 +72,6 @@ if g:pymode_lint command! -buffer -nargs=0 PymodeLint :call pymode#lint#check() let b:pymode_error_line = -1 - let b:pymode_errors = {} - let b:pymode_signs = [] if g:pymode_lint_on_fly au! InsertLeave PymodeLint From 0d448ad54fa3b598b22d14803f078e640f573544 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 16:42:10 +0700 Subject: [PATCH 011/428] Try to fix travis --- .travis.yml | 2 +- Makefile | 6 ++++++ t/rope.vim | 2 ++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 407991c1..e10ed9f1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,4 +3,4 @@ python: "2.7" rvm: - 1.9.3 script: - - make test + - make travis diff --git a/Makefile b/Makefile index 42016f43..e30f836d 100644 --- a/Makefile +++ b/Makefile @@ -6,6 +6,12 @@ PYLAMA = $(LIBS)/pylama clean: find . 
-name "*.pyc" -delete +# Temporary disable rope tests on Travis +.PHONY: travis +travis: + rm -rf t/rope.vim + rake test + .PHONY: test test: bundle install diff --git a/t/rope.vim b/t/rope.vim index bc10fbc6..4211638a 100644 --- a/t/rope.vim +++ b/t/rope.vim @@ -1,5 +1,7 @@ let g:pymode_rope_completion_bind = 'X' let g:pymode_rope_autoimport = 0 +let g:pymode_debug = 1 + source plugin/pymode.vim describe 'pymode-plugin' From 550f55cec528cfb28cbeef3f183d4fff7f9e3493 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 17:59:13 +0700 Subject: [PATCH 012/428] Fix bugs. --- Changelog.rst | 11 +++++++---- Makefile | 2 +- autoload/pymode.vim | 13 +++++++++++-- autoload/pymode/lint.vim | 6 ++---- autoload/pymode/tools/loclist.vim | 2 ++ autoload/pymode/tools/signs.vim | 3 +-- doc/pymode.txt | 19 +++++++++++++++++-- ftplugin/python/pymode.vim | 16 +++++----------- plugin/pymode.vim | 3 +++ pymode/lint.py | 3 +++ pymode/rope.py | 8 +++++++- t/rope.vim | 6 +----- 12 files changed, 60 insertions(+), 32 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index cd9e49cc..41eec5e1 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -21,18 +21,21 @@ Changelog 'pymode_rope_enable_autoimport' -> 'pymode_rope_autoimport' * Options removed: + 'pymode_lint_hold', 'pymode_lint_config', 'pymode_lint_jump', 'pymode_lint_signs_always_visible', 'pymode_rope_extended_complete', 'pymode_rope_auto_project', 'pymode_rope_autoimport_generate', 'pymode_rope_autoimport_underlines', 'pymode_rope_codeassist_maxfixes', 'pymode_rope_sorted_completions', 'pymode_rope_extended_complete', 'pymode_rope_confirm_saving', 'pymode_rope_global_prefix', - 'pymode_rope_local_prefix', 'pymode_rope_vim_completion', 'pymode_rope_guess_project', - 'pymode_rope_goto_def_newwin', 'pymode_rope_always_show_complete_menu' + 'pymode_rope_local_prefix', 'pymode_rope_vim_completion', + 'pymode_rope_guess_project', 'pymode_rope_goto_def_newwin', + 'pymode_rope_always_show_complete_menu' * Options added: - 
'pymode_rope_regenerate_on_write', 'pymode_rope_completion', 'pymode_rope_complete_on_dot', - 'pymode_lint_sort' + 'pymode_rope_regenerate_on_write', 'pymode_rope_completion', + 'pymode_rope_complete_on_dot', 'pymode_lint_sort', + 'pymode_rope_look_project', 'pymode_lint_unmodified' * Commands added: 'PymodeVirtualenv' diff --git a/Makefile b/Makefile index e30f836d..090a77e6 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ clean: # Temporary disable rope tests on Travis .PHONY: travis travis: - rm -rf t/rope.vim + # rm -rf t/rope.vim rake test .PHONY: test diff --git a/autoload/pymode.vim b/autoload/pymode.vim index 77834bed..97d65f9c 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -106,13 +106,22 @@ endfunction "}}} fun! pymode#buffer_pre_write() "{{{ let b:pymode_modified = &modified + if g:pymode_lint_unmodified || (g:pymode_lint_on_write && b:pymode_modified) + call pymode#debug('check code') + call pymode#lint#check() + endif endfunction fun! pymode#buffer_post_write() "{{{ if b:pymode_modified && g:pymode_rope_regenerate_on_write + call pymode#debug('regenerate') call pymode#rope#regenerate() endif - if g:pymode_lint_on_write && (b:pymode_modified || g:pymode_lint_unmodified) - call pymode#lint#check() +endfunction "}}} + +fun! pymode#debug(msg) "{{{ + if g:pymode_debug + let g:pymode_debug += 1 + echom string(g:pymode_debug) . ': ' . string(a:msg) endif endfunction "}}} diff --git a/autoload/pymode/lint.vim b/autoload/pymode/lint.vim index 6048a7ef..2166b8e7 100644 --- a/autoload/pymode/lint.vim +++ b/autoload/pymode/lint.vim @@ -48,8 +48,6 @@ endfunction "}}} fun! pymode#lint#check() "{{{ " DESC: Run checkers on current file. " - if !g:pymode_lint | return | endif - let loclist = g:PymodeLocList.current() let b:pymode_error_line = -1 @@ -64,13 +62,13 @@ fun! pymode#lint#check() "{{{ call pymode#wide_message('Code checking is completed. 
No errors found.') endif + call g:PymodeSigns.refresh(loclist) + if g:pymode_lint_cwindow call setqflist(loclist._loclist) call pymode#quickfix_open(0, g:pymode_quickfix_maxheight, g:pymode_quickfix_minheight, 0) endif - call g:PymodeSigns.refresh(loclist) - call pymode#lint#show_errormessage() call pymode#wide_message('Found errors and warnings: ' . len(loclist._loclist)) diff --git a/autoload/pymode/tools/loclist.vim b/autoload/pymode/tools/loclist.vim index ca35abab..b9db76b8 100644 --- a/autoload/pymode/tools/loclist.vim +++ b/autoload/pymode/tools/loclist.vim @@ -18,6 +18,7 @@ fun! g:PymodeLocList.current() "{{{ if !exists("b:pymode_loclist") let b:pymode_loclist = g:PymodeLocList.init([]) endif + let b:pymode_loclist._bufnr = bufnr('.') return b:pymode_loclist endfunction "}}} @@ -30,6 +31,7 @@ endfunction "}}} fun! g:PymodeLocList.clear() "{{{ let self._loclist = [] let self._messages = {} + let self._bufnr = bufnr('') endfunction "}}} diff --git a/autoload/pymode/tools/signs.vim b/autoload/pymode/tools/signs.vim index dbac8c39..54e46643 100644 --- a/autoload/pymode/tools/signs.vim +++ b/autoload/pymode/tools/signs.vim @@ -45,12 +45,11 @@ endfunction "}}} fun! 
g:PymodeSigns.place(loclist) "{{{ let seen = {} - let buf = bufnr('') for issue in a:loclist._loclist if !has_key(seen, issue.lnum) let seen[issue.lnum] = 1 call add(self._sign_ids, self._next_id) - execute printf('sign place %d line=%d name=%s buffer=%d', self._next_id, issue.lnum, "Pymode".issue.type[0], buf) + execute printf('sign place %d line=%d name=%s buffer=%d', self._next_id, issue.lnum, "Pymode".issue.type[0], issue.bufnr) let self._next_id += 1 endif endfor diff --git a/doc/pymode.txt b/doc/pymode.txt index 06a450e1..22726cb6 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -346,6 +346,11 @@ Commands: |:PymodeRopeRenameModule| -- Rename current module |:PymodeRopeUndo| -- Undo changes from last refactoring + +Turn on the rope script *'g:pymode_rope'* +> + let g:pymode_rope = 1 + .roperoject Folder ~ *.ropeproject* @@ -366,10 +371,18 @@ Currently it is used for things such as: * It can be used for saving object information to help rope object inference. * It can be used for saving global names cache which is used in auto-import. +If ``.ropeproject`` is not found in the current directory, rope will walk +upwards looking for a ``.ropeproject`` in every dir of the parent path. If +rope finds ``.ropeproject`` in a parent dir, it sets the project for all child +dirs and the scan may be slow for so many dirs and files. -Turn on the rope script *'g:pymode_rope'* +Enable search |.ropeproject| in parent's directories + *'g:pymode_rope_look_project'* > - let g:pymode_rope = 1 + let g:pymode_rope_look_project = 1 + + +Show documentation for element under cursor ~ Show documentation for object under cursor. *'g:pymode_rope_show_doc_bind'* Leave empty for disable key binding. @@ -628,6 +641,8 @@ Solutions: in the current dir. - Run ``:PymodeRopeNewProject`` to make rope create ``.ropeproject`` in the current dir. +- Set |'g:pymode_rope_look_project'| to 0 for prevent searching in parent + dirs. 
Pylint check is very slow diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index 0ec77b4d..2492f44b 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -11,15 +11,11 @@ if !pymode#default('g:pymode_init', 1) PymodePython patch_paths() endif -augroup pymode - - au! - command! -buffer -nargs=1 PymodeVirtualenv call pymode#virtualenv#activate() " Setup events for pymode -au BufWritePre call pymode#buffer_pre_write() -au BufWritePost call pymode#buffer_post_write() +au! pymode BufWritePre call pymode#buffer_pre_write() +au! pymode BufWritePost call pymode#buffer_post_write() " Run python code if g:pymode_run @@ -74,12 +70,12 @@ if g:pymode_lint let b:pymode_error_line = -1 if g:pymode_lint_on_fly - au! InsertLeave PymodeLint + au! pymode InsertLeave PymodeLint endif if g:pymode_lint_message - au! CursorMoved - au! CursorMoved call pymode#lint#show_errormessage() + au! pymode CursorMoved + au! pymode CursorMoved call pymode#lint#show_errormessage() endif " Disabled for current release @@ -187,5 +183,3 @@ if g:pymode_rope end end - -augroup END diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 8d7bcaae..7c4b8f07 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -144,6 +144,9 @@ call pymode#default('g:pymode_breakpoint_cmd', '') " Rope support call pymode#default('g:pymode_rope', 1) +" If project hasnt been finded in current working directory, look at parents directory +call pymode#default('g:pymode_rope_look_project', 1) + " Enable Rope completion call pymode#default('g:pymode_rope_completion', 1) diff --git a/pymode/lint.py b/pymode/lint.py index a42f8b6b..b0c27be0 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -50,5 +50,8 @@ def __sort(e): if sort_rules: errors = sorted(errors, key=__sort) + for e in errors: + e['bufnr'] = b.number + vim.command( 'call g:PymodeLocList.current().extend(%s)' % json.dumps(errors)) diff --git a/pymode/rope.py b/pymode/rope.py index a4174e01..b47f7594 100644 --- 
a/pymode/rope.py +++ b/pymode/rope.py @@ -238,6 +238,7 @@ def organize_imports(): reload_changes(changes) +@catch_and_print_exceptions def regenerate(): """ Clear cache. """ with RopeContext() as ctx: @@ -306,7 +307,10 @@ def get_ctx(*args, **kwargs): if resources.get(path): return resources.get(path) - project_path = look_ropeproject(os.path.dirname(path)) + project_path = os.path.dirname(vim.eval('getcwd()')) + if int(vim.eval('g:pymode_rope_look_project')): + project_path = look_ropeproject(project_path) + ctx = projects.get(project_path) if not ctx: projects[project_path] = ctx = cls(path, project_path) @@ -376,6 +380,8 @@ def __init__(self, path, project_path): if self.options.get('autoimport') == '1': self.generate_autoimport_cache() + debug('Context inited %s' % project_path) + def __enter__(self): self.project.validate(self.project.root) self.options['encoding'] = vim.eval('&encoding') diff --git a/t/rope.vim b/t/rope.vim index 4211638a..8656aa89 100644 --- a/t/rope.vim +++ b/t/rope.vim @@ -18,13 +18,9 @@ describe 'pymode-plugin' it 'pymode rope auto open project in current working directory' let project_path = '.ropeproject' Expect isdirectory(project_path) == 0 - call pymode#rope#complete(0) - Expect isdirectory(project_path) == 1 - end - - it 'pymode rope completion' normal oimporX Expect getline('.') == 'import' + Expect isdirectory(project_path) == 1 end end From 757c08d218606889ea21b7b50ef98f9f56aed761 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 19:03:43 +0700 Subject: [PATCH 013/428] Add debug configuration --- README.rst | 11 +++++++++-- autoload/pymode.vim | 8 ++++---- debug.vim | 13 +++++++++++++ plugin/pymode.vim | 3 +++ 4 files changed, 29 insertions(+), 6 deletions(-) create mode 100644 debug.vim diff --git a/README.rst b/README.rst index e000669a..62cba9a8 100644 --- a/README.rst +++ b/README.rst @@ -98,9 +98,16 @@ Then rebuild **helptags** in vim:: Troubleshooting =============== -If your python-mode doesn't work: open 
any python file and type: :: +If your python-mode doesn't work: - :call pymode#troubleshooting#test() +1. Load Vim with only python-mode enabled (use `debug.vim` from pymode): :: + + vim -u /debug.vim + +And try to repeat your case. If no error occurs, seems like problem isnt in the +plugin. + +2. Type `:PymodeTroubleshooting` And fix any warnings or copy the output and send it to me. (For example, by creating a `new github issue `_ diff --git a/autoload/pymode.vim b/autoload/pymode.vim index 97d65f9c..4b5094e8 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -106,10 +106,6 @@ endfunction "}}} fun! pymode#buffer_pre_write() "{{{ let b:pymode_modified = &modified - if g:pymode_lint_unmodified || (g:pymode_lint_on_write && b:pymode_modified) - call pymode#debug('check code') - call pymode#lint#check() - endif endfunction fun! pymode#buffer_post_write() "{{{ @@ -117,6 +113,10 @@ fun! pymode#buffer_post_write() "{{{ call pymode#debug('regenerate') call pymode#rope#regenerate() endif + if g:pymode_lint_unmodified || (g:pymode_lint_on_write && b:pymode_modified) + call pymode#debug('check code') + call pymode#lint#check() + endif endfunction "}}} fun! pymode#debug(msg) "{{{ diff --git a/debug.vim b/debug.vim new file mode 100644 index 00000000..c7d32661 --- /dev/null +++ b/debug.vim @@ -0,0 +1,13 @@ +" Use this settings for testing the plugin. +" Run vim with command +" +" $ vim -u debug.py +" +" Only python-mode will be loaded. + + +execute('set rtp+='. expand(':p:h')) +set rtp -=$HOME/.vim +set rtp -=$HOME/.vim/after +set nocp +syntax enable diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 7c4b8f07..ce4ffdb9 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -13,6 +13,9 @@ if !g:pymode || &cp finish endif +" Pymode needs +filetype plugin on + " OPTIONS: {{{ " Vim Python interpreter. Set to 'disable' for remove python features. 
From 26540c56fb82df20cd820f34a4ee91a108a72b50 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 19:22:29 +0700 Subject: [PATCH 014/428] Fix #331 --- pymode/autopep8.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/autopep8.py b/pymode/autopep8.py index 96351af8..3f400ddd 100644 --- a/pymode/autopep8.py +++ b/pymode/autopep8.py @@ -55,7 +55,7 @@ import tokenize import warnings -from .pylama.lint.pylama_pep8 import pep8 +from pylama.lint.pylama_pep8 import pep8 try: From b1a91943f0ed4522e0f7d12a0c3e00a6fe46d398 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 19:25:01 +0700 Subject: [PATCH 015/428] dirty hack for travis --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 090a77e6..e30f836d 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ clean: # Temporary disable rope tests on Travis .PHONY: travis travis: - # rm -rf t/rope.vim + rm -rf t/rope.vim rake test .PHONY: test From e70721f05ac41927b1b74d1dbb4d6afb3688a619 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 19:26:05 +0700 Subject: [PATCH 016/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 41eec5e1..899c4e90 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-01 0.7.1b +## 2013-12-02 0.7.2b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index 22726cb6..4c55ffb3 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.1b + Version: 0.7.2b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index ce4ffdb9..ea3df262 100644 --- 
a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.1b" +let g:pymode_version = "0.7.2b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! PymodeTroubleshooting call pymode#troubleshooting#test() From a3d5e50202d917eccc5580367a2dd27775b08743 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 19:36:20 +0700 Subject: [PATCH 017/428] Hide mock vim --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 93a0a7a0..3832bf8a 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ tags test.py todo.txt vendor +vim.py From 3485b69277ef6957040a666ee1fb64c91403b6ed Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 20:05:39 +0700 Subject: [PATCH 018/428] Try to fix #329 --- autoload/pymode.vim | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index 4b5094e8..fe91a597 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -47,11 +47,12 @@ fun! pymode#quickfix_open(onlyRecognized, maxHeight, minHeight, jumpError) "{{{ let numErrors = len(filter(getqflist(), 'v:val.valid')) let numOthers = len(getqflist()) - numErrors if numErrors > 0 || (!a:onlyRecognized && numOthers > 0) + let num = winnr() botright copen exe max([min([line("$"), a:maxHeight]), a:minHeight]) . 
"wincmd _" if a:jumpError cc - else + elseif num != winnr() wincmd p endif else From cf8ddc70650a850edfce1e9408874e9f037966b7 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 21:04:39 +0700 Subject: [PATCH 019/428] Switch to loclist --- autoload/pymode.vim | 16 ++++++++++------ autoload/pymode/lint.vim | 3 +-- autoload/pymode/rope.vim | 8 ++++++-- autoload/pymode/run.vim | 8 +++++++- autoload/pymode/tools/loclist.vim | 20 ++++++++++++++++++-- 5 files changed, 42 insertions(+), 13 deletions(-) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index fe91a597..bc325391 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -110,13 +110,17 @@ fun! pymode#buffer_pre_write() "{{{ endfunction fun! pymode#buffer_post_write() "{{{ - if b:pymode_modified && g:pymode_rope_regenerate_on_write - call pymode#debug('regenerate') - call pymode#rope#regenerate() + if g:pymode_rope + if b:pymode_modified && g:pymode_rope_regenerate_on_write + call pymode#debug('regenerate') + call pymode#rope#regenerate() + endif endif - if g:pymode_lint_unmodified || (g:pymode_lint_on_write && b:pymode_modified) - call pymode#debug('check code') - call pymode#lint#check() + if g:pymode_lint + if g:pymode_lint_unmodified || (g:pymode_lint_on_write && b:pymode_modified) + call pymode#debug('check code') + call pymode#lint#check() + endif endif endfunction "}}} diff --git a/autoload/pymode/lint.vim b/autoload/pymode/lint.vim index 2166b8e7..e7a0f1c5 100644 --- a/autoload/pymode/lint.vim +++ b/autoload/pymode/lint.vim @@ -65,8 +65,7 @@ fun! 
pymode#lint#check() "{{{ call g:PymodeSigns.refresh(loclist) if g:pymode_lint_cwindow - call setqflist(loclist._loclist) - call pymode#quickfix_open(0, g:pymode_quickfix_maxheight, g:pymode_quickfix_minheight, 0) + call loclist.show() endif call pymode#lint#show_errormessage() diff --git a/autoload/pymode/rope.vim b/autoload/pymode/rope.vim index 737dd71d..a7e9926c 100644 --- a/autoload/pymode/rope.vim +++ b/autoload/pymode/rope.vim @@ -2,6 +2,8 @@ " PymodePython from pymode import rope +call pymode#tools#loclist#init() + fun! pymode#rope#completions(findstart, base) PymodePython rope.completions() @@ -56,8 +58,10 @@ fun! pymode#rope#find_it() PymodePython rope.find_it() call pymode#wide_message('') if !empty(l:output) - call setqflist(l:output) - call pymode#quickfix_open(0, g:pymode_quickfix_maxheight, g:pymode_quickfix_minheight, 0) + let loclist = g:PymodeLocList.current() + let loclist._loclist = l:output + let loclist._title = "Occurrences" + call loclist.show() end endfunction diff --git a/autoload/pymode/run.vim b/autoload/pymode/run.vim index 5f177acd..714f3235 100644 --- a/autoload/pymode/run.vim +++ b/autoload/pymode/run.vim @@ -43,6 +43,8 @@ let s:efm .= '%-G%.%#' PymodePython from pymode.run import run_code +call pymode#tools#loclist#init() + " DESC: Run python code fun! pymode#run#code_run(line1, line2) "{{{ @@ -86,7 +88,11 @@ fun! pymode#run#code_run(line1, line2) "{{{ call setqflist(qflist) endif - call pymode#quickfix_open(0, g:pymode_quickfix_maxheight, g:pymode_quickfix_maxheight, 0) + let loclist = g:PymodeLocList.current() + let loclist._loclist = getqflist() + let loclist._title = "Run errors" + call loclist.show() + let &efm = l:_efm catch /E234/ diff --git a/autoload/pymode/tools/loclist.vim b/autoload/pymode/tools/loclist.vim index b9db76b8..364c9a7e 100644 --- a/autoload/pymode/tools/loclist.vim +++ b/autoload/pymode/tools/loclist.vim @@ -10,6 +10,7 @@ fun! 
g:PymodeLocList.init(raw_list) "{{{ let obj = copy(self) let loc_list = filter(copy(a:raw_list), 'v:val["valid"] == 1') call obj.clear() + let obj._title = 'CodeCheck' return obj endfunction "}}} @@ -18,7 +19,6 @@ fun! g:PymodeLocList.current() "{{{ if !exists("b:pymode_loclist") let b:pymode_loclist = g:PymodeLocList.init([]) endif - let b:pymode_loclist._bufnr = bufnr('.') return b:pymode_loclist endfunction "}}} @@ -31,7 +31,7 @@ endfunction "}}} fun! g:PymodeLocList.clear() "{{{ let self._loclist = [] let self._messages = {} - let self._bufnr = bufnr('') + let self._name = expand('%:t') endfunction "}}} @@ -62,3 +62,19 @@ fun! g:PymodeLocList.filter(filters) "{{{ endfor return loclist endfunction "}}} + + +fun! g:PymodeLocList.show() "{{{ + call setloclist(0, self._loclist) + if self.is_empty() + lclose + else + let num = winnr() + execute "lopen " . g:pymode_quickfix_maxheight + execute max([min([line("$"), g:pymode_quickfix_maxheight]), g:pymode_quickfix_minheight]) . "wincmd _" + if num != winnr() + call setwinvar(winnr(), 'quickfix_title', self._title . ' <' . self._name . 
'>') + wincmd p + endif + end +endfunction "}}} From 64cf32feccefc481458087a6f98e7b7cb75755a5 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 21:13:16 +0700 Subject: [PATCH 020/428] Remove pymode_rope_look_project --- Changelog.rst | 2 +- doc/pymode.txt | 6 +++--- plugin/pymode.vim | 2 +- pymode/rope.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 899c4e90..be8036a2 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -35,7 +35,7 @@ Changelog * Options added: 'pymode_rope_regenerate_on_write', 'pymode_rope_completion', 'pymode_rope_complete_on_dot', 'pymode_lint_sort', - 'pymode_rope_look_project', 'pymode_lint_unmodified' + 'pymode_rope_lookup_project', 'pymode_lint_unmodified' * Commands added: 'PymodeVirtualenv' diff --git a/doc/pymode.txt b/doc/pymode.txt index 4c55ffb3..851c03ee 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -377,9 +377,9 @@ rope finds ``.ropeproject`` in a parent dir, it sets the project for all child dirs and the scan may be slow for so many dirs and files. Enable search |.ropeproject| in parent's directories - *'g:pymode_rope_look_project'* + *'g:pymode_rope_lookup_project'* > - let g:pymode_rope_look_project = 1 + let g:pymode_rope_lookup_project = 1 Show documentation for element under cursor ~ @@ -641,7 +641,7 @@ Solutions: in the current dir. - Run ``:PymodeRopeNewProject`` to make rope create ``.ropeproject`` in the current dir. -- Set |'g:pymode_rope_look_project'| to 0 for prevent searching in parent +- Set |'g:pymode_rope_lookup_project'| to 0 for prevent searching in parent dirs. 
diff --git a/plugin/pymode.vim b/plugin/pymode.vim index ea3df262..5f2b774c 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -148,7 +148,7 @@ call pymode#default('g:pymode_breakpoint_cmd', '') call pymode#default('g:pymode_rope', 1) " If project hasnt been finded in current working directory, look at parents directory -call pymode#default('g:pymode_rope_look_project', 1) +call pymode#default('g:pymode_rope_lookup_project', 1) " Enable Rope completion call pymode#default('g:pymode_rope_completion', 1) diff --git a/pymode/rope.py b/pymode/rope.py index b47f7594..c2f6ae90 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -308,7 +308,7 @@ def get_ctx(*args, **kwargs): return resources.get(path) project_path = os.path.dirname(vim.eval('getcwd()')) - if int(vim.eval('g:pymode_rope_look_project')): + if int(vim.eval('g:pymode_rope_lookup_project')): project_path = look_ropeproject(project_path) ctx = projects.get(project_path) From 79e160e550d1014b27b5f271c86e82f43781fd1e Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 21:25:28 +0700 Subject: [PATCH 021/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index be8036a2..56b8fd9c 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-02 0.7.2b +## 2013-12-02 0.7.3b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index 851c03ee..32855d68 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.2b + Version: 0.7.3b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 5f2b774c..201601b8 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: 
fdl=1 -let g:pymode_version = "0.7.2b" +let g:pymode_version = "0.7.3b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! PymodeTroubleshooting call pymode#troubleshooting#test() From ced89b5123e0b006e04e84e5c2abfd724046374f Mon Sep 17 00:00:00 2001 From: Daniel Hahler Date: Mon, 2 Dec 2013 15:49:13 +0100 Subject: [PATCH 022/428] doc: corrections while reading it --- doc/pymode.txt | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 851c03ee..5e47930f 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -358,25 +358,25 @@ Turn on the rope script *'g:pymode_rope'* *:PymodeRopeRegenerate* -- Regenerate the project cache Rope uses a folder inside projects for holding project configuration and data. -Its default name is `.ropeproject` If the folder doesnt exists in current -working directory, parent folders will be checked. +Its default name is `.ropeproject`. Currently it is used for things such as: -* There is a config.py file in this folder in which you can change project - configurations. Have look at the default config.py file (s created when it +* The config.py file in this folder contains project configuration. Have + a look at the default config.py file (which is created when it does not exist) for more information. * It can be used for saving project history, so that the next time you open the project you can undo past changes. -* It can be used for saving object information to help rope object inference. -* It can be used for saving global names cache which is used in auto-import. +* It can be used to save information about object inferences. +* It can be used to save a global name cache, which is used for auto-import. -If ``.ropeproject`` is not found in the current directory, rope will walk -upwards looking for a ``.ropeproject`` in every dir of the parent path. 
If -rope finds ``.ropeproject`` in a parent dir, it sets the project for all child -dirs and the scan may be slow for so many dirs and files. +If `.ropeproject` is not found in the current directory, rope will look +recursively for it in parent folders. +Warning: If rope finds `.ropeproject` in a parent dir, it will use it with +all its child directories, which may slow scanning down (because of many, +possibly unrelated, files) -Enable search |.ropeproject| in parent's directories +Enable searching for |.ropeproject| in parent directories *'g:pymode_rope_lookup_project'* > let g:pymode_rope_lookup_project = 1 @@ -385,7 +385,7 @@ Enable search |.ropeproject| in parent's directories Show documentation for element under cursor ~ Show documentation for object under cursor. *'g:pymode_rope_show_doc_bind'* -Leave empty for disable key binding. +Leave empty to disable the key binding. > let g:pymode_rope_show_doc_bind = 'd' @@ -397,18 +397,18 @@ Regenerate project cache on every save (if file has been modified) 4.1 Completion ~ *pymode-completion* -By default you could typing for autocompletion. Will be +By default you can use for autocompletion. Will be automatically selected first entry and you can press to insert in your code. and / works too. -Autocompletion is also called by typing a period in |Insert| mode. +Autocompletion is also called by typing a period in |Insert| mode by default. 
Turn on code completion support in the plugin *'g:pymode_rope_completion'* > let g:pymode_rope_completion = 1 -Turn on autocompletion when you typing a period +Turn on autocompletion when typing a period *'g:pymode_rope_complete_on_dot'* > let g:pymode_rope_complete_on_dot = 1 @@ -417,8 +417,8 @@ Keymap for autocomplete *'g:pymode_rope_completion_bind'* > let g:pymode_rope_completion_bind = '' -Extended autocompletion (rope could complete objects wich hasnt be imported) -from project *'g:pymode_rope_autoimport'* +Extended autocompletion (rope could complete objects which have not been +imported) from project *'g:pymode_rope_autoimport'* > let g:pymode_rope_autoimport = 1 From 63b5c2a89ff73c1f89cac9f6f2c469e19a341648 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 2 Dec 2013 21:54:09 +0700 Subject: [PATCH 023/428] Clear signs after autopep8 --- autoload/pymode/lint.vim | 1 + 1 file changed, 1 insertion(+) diff --git a/autoload/pymode/lint.vim b/autoload/pymode/lint.vim index e7a0f1c5..b77bdba2 100644 --- a/autoload/pymode/lint.vim +++ b/autoload/pymode/lint.vim @@ -11,6 +11,7 @@ fun! pymode#lint#auto() "{{{ PymodePython from pymode import auto PymodePython auto() cclose + call g:PymodeSigns.clear() edit call pymode#wide_message("AutoPep8 done.") endfunction "}}} From 1bd7a1dc3db2159c000c8f686d8affdf89260d36 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 01:02:15 +0700 Subject: [PATCH 024/428] Fix #329 --- autoload/pymode/tools/loclist.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode/tools/loclist.vim b/autoload/pymode/tools/loclist.vim index 364c9a7e..7daaea7a 100644 --- a/autoload/pymode/tools/loclist.vim +++ b/autoload/pymode/tools/loclist.vim @@ -74,7 +74,7 @@ fun! g:PymodeLocList.show() "{{{ execute max([min([line("$"), g:pymode_quickfix_maxheight]), g:pymode_quickfix_minheight]) . "wincmd _" if num != winnr() call setwinvar(winnr(), 'quickfix_title', self._title . ' <' . self._name . 
'>') - wincmd p + exe num . "wincmd w" endif end endfunction "}}} From 559db228389f6c61c19a954f1cf324ecc2a15213 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 01:15:31 +0700 Subject: [PATCH 025/428] More information in loclist when you searching occurencies. --- pymode/rope.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pymode/rope.py b/pymode/rope.py index c2f6ae90..c990cc16 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -203,6 +203,7 @@ def find_it(): for oc in occurrences: lst.append(dict( filename=oc.resource.path, + text=ctx.current[oc.lineno - 1], lnum=oc.lineno, )) vim.command('let l:output = %s' % json.dumps(lst)) @@ -367,6 +368,7 @@ def __init__(self, path, project_path): update_python_path(self.project.prefs.get('python_path', [])) self.resource = None + self.current = None self.options = dict( completeopt=vim.eval('&completeopt'), autoimport=int(vim.eval('g:pymode_rope_autoimport')), @@ -385,8 +387,9 @@ def __init__(self, path, project_path): def __enter__(self): self.project.validate(self.project.root) self.options['encoding'] = vim.eval('&encoding') + self.current = vim.current.buffer self.resource = libutils.path_to_resource( - self.project, vim.current.buffer.name, 'file') + self.project, self.current.name, 'file') if not self.resource.exists() or os.path.isdir( self.resource.real_path): From 7c4617a8199a79edd88d5416bf10f7960df12c95 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 04:23:19 +0700 Subject: [PATCH 026/428] Fix #323 --- autoload/pymode/rope.vim | 11 +- pymode/environment.py | 242 +++++++++++++++++++++++++++++++++++++ pymode/lint.py | 40 +++---- pymode/rope.py | 249 +++++++++++++++++---------------------- pymode/run.py | 28 ++--- pymode/utils.py | 32 ----- pymode/virtualenv.py | 17 ++- 7 files changed, 387 insertions(+), 232 deletions(-) create mode 100644 pymode/environment.py diff --git a/autoload/pymode/rope.vim b/autoload/pymode/rope.vim index a7e9926c..a6170c33 
100644 --- a/autoload/pymode/rope.vim +++ b/autoload/pymode/rope.vim @@ -53,16 +53,11 @@ endfunction fun! pymode#rope#find_it() - let l:output = [] + let loclist = g:PymodeLocList.current() + let loclist._title = "Occurrences" call pymode#wide_message('Finding Occurrences ...') PymodePython rope.find_it() - call pymode#wide_message('') - if !empty(l:output) - let loclist = g:PymodeLocList.current() - let loclist._loclist = l:output - let loclist._title = "Occurrences" - call loclist.show() - end + call loclist.show() endfunction diff --git a/pymode/environment.py b/pymode/environment.py new file mode 100644 index 00000000..98d6a530 --- /dev/null +++ b/pymode/environment.py @@ -0,0 +1,242 @@ +""" Define interfaces. """ + +from __future__ import print_function + +import vim +import json +import time +import os.path + +from .utils import PY2 + + +class VimPymodeEnviroment(object): + + """ Vim User interface. """ + + prefix = '[Pymode]' + + def __init__(self): + self.current = vim.current + self.options = dict(encoding=vim.eval('&enc')) + self.options['debug'] = self.var('g:pymode_debug', True) + + @property + def curdir(self): + """ Return current working directory. """ + + return self.var('getcwd()') + + @property + def curbuf(self): + """ Return current buffer. """ + + return self.current.buffer + + @property + def cursor(self): + """ Return current window position. + + :return tuple: (row, col) + + """ + return self.current.window.cursor + + @property + def source(self): + """ Return source of current buffer. """ + + return "\n".join(self.lines) + + @property + def lines(self): + """ Iterate by lines in current file. + + :return list: + + """ + if not PY2: + return self.curbuf + + return [l.decode(self.options.get('encoding')) for l in self.curbuf] + + def var(self, name, to_bool=False): + """ Get vim variable. 
+ + :return vimobj: + + """ + + value = vim.eval(name) + + if to_bool: + try: + value = bool(int(value)) + except ValueError: + value = value + return value + + def message(self, msg, history=False): + """ Show message to user. """ + + if history: + return vim.command('echom "%s"' % str(msg)) + + return vim.command('call pymode#wide_message("%s")' % str(msg)) + + def user_input(self, msg, default=''): + """ Return user input or default. + + :return str: + + """ + msg = '%s %s ' % (self.prefix, msg) + + if default != '': + msg += '[%s] ' % default + + try: + vim.command('echohl Debug') + input_str = vim.eval('input("%s> ")' % msg) + vim.command('echohl none') + except KeyboardInterrupt: + input_str = '' + + return input_str or default + + def user_confirm(self, msg, yes=False): + """ Get user confirmation. + + :return bool: + + """ + default = 'yes' if yes else 'no' + action = self.user_input(msg, default) + return action and 'yes'.startswith(action) + + def user_input_choices(self, msg, *options): + """ Get one of many options. + + :return str: A choosen option + + """ + choices = ['%s %s' % (self.prefix, msg)] + choices += [ + "%s. %s" % (num, opt) for num, opt in enumerate(options, 1)] + try: + input_str = int( + vim.eval('inputlist(%s)' % self.prepare_value(choices))) + except (KeyboardInterrupt, ValueError): + input_str = 0 + + if not input_str: + self.message('Cancelled!') + return False + + try: + return options[input_str - 1] + except (IndexError, ValueError): + self.error('Invalid option: %s' % input_str) + return self.user_input_choices(msg, *options) + + def error(self, msg): + """ Show error to user. """ + vim.command('call pymode#error("%s")' % str(msg)) + + def debug(self, msg, *args): + """ Print debug information. """ + + if self.options.get('debug'): + print("%s %s [%s]" % ( + int(time.time()), msg, ', '.join([str(a) for a in args]))) + + def stop(self, value=None): + """ Break Vim function. 
""" + + cmd = 'return' + if value: + cmd += ' ' + self.prepare_value(value) + vim.command(cmd) + + def catch_exceptions(self, func): + """ Decorator. Make execution more silence. + + :return func: + + """ + + def _wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except (Exception, vim.error) as e: # noqa + if self.options.get('debug'): + raise + self.error(e) + return None + return _wrapper + + def run(self, name, *args): + """ Run vim function. """ + + vim.command('call %s(%s)' % (name, ", ".join([ + self.prepare_value(a) for a in args + ]))) + + def let(self, name, value): + """ Set variable. """ + cmd = 'let %s = %s' % (name, self.prepare_value(value)) + self.debug(cmd) + vim.command(cmd) + + def prepare_value(self, value): + """ Decode bstr to vim encoding. + + :return unicode string: + + """ + + value = json.dumps(value) + if PY2: + value = value.decode('utf-8').encode(self.options.get('encoding')) + + return value + + def get_offset_params(self, cursor=None, base=""): + """ Calculate current offset. + + :return tuple: (source, offset) + + """ + row, col = cursor or env.cursor + source = "" + offset = 0 + for i, line in enumerate(self.lines, 1): + if i == row: + source += line[:col] + base + offset = len(source) + source += line[col:] + else: + source += line + source += '\n' + env.debug('Get offset', base or None, row, col, offset) + return source, offset + + def goto_line(self, line): + """ Go to line. """ + + vim.command('normal %sggzz' % line) + + def goto_file(self, path, cmd='e', force=False): + """ Function description. """ + + if force or os.path.abspath(path) != self.curbuf.name: + self.debug('read', path) + vim.command("%s %s" % (cmd, path)) + + def goto_buffer(self, bufnr): + """ Open buffer. 
""" + if str(bufnr) != '-1': + vim.command('buffer %s' % bufnr) + + +env = VimPymodeEnviroment() diff --git a/pymode/lint.py b/pymode/lint.py index b0c27be0..d7c39dbe 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -1,7 +1,7 @@ """ Pylama integration. """ -import vim # noqa -from .utils import pymode_message, silence_stderr +from .environment import env +from .utils import silence_stderr import os.path @@ -15,31 +15,27 @@ def code_check(): from pylama.main import parse_options from pylama.tasks import check_path - import json - - b = vim.current.buffer - root = vim.eval('getcwd()') - linters = vim.eval('g:pymode_lint_checkers') - ignore = vim.eval('g:pymode_lint_ignore') - select = vim.eval('g:pymode_lint_select') options = parse_options( - ignore=ignore, select=select, linters=linters) + ignore=env.var('g:pymode_lint_ignore'), + select=env.var('g:pymode_lint_select'), + linters=env.var('g:pymode_lint_checkers'), + ) - path = b.name - if root: - path = os.path.relpath(path, root) + path = os.path.relpath(env.curbuf.name, env.curdir) + env.debug("Start code check: ", path) if getattr(options, 'skip', None) and any(p.match(path) for p in options.skip): # noqa - pymode_message('Skip code checking.') - vim.command('return') + env.message('Skip code checking.') + env.debug("Skipped") + env.stop() return False - code = '\n'.join(vim.current.buffer) - with silence_stderr(): - errors = check_path(path, options=options, code=code) - sort_rules = vim.eval('g:pymode_lint_sort') + errors = check_path(path, options=options, code=env.source) + + env.debug("Find errors: ", len(errors)) + sort_rules = env.var('g:pymode_lint_sort') def __sort(e): try: @@ -48,10 +44,10 @@ def __sort(e): return 999 if sort_rules: + env.debug("Find sorting: ", sort_rules) errors = sorted(errors, key=__sort) for e in errors: - e['bufnr'] = b.number + e['bufnr'] = env.curbuf.number - vim.command( - 'call g:PymodeLocList.current().extend(%s)' % json.dumps(errors)) + 
env.run('g:PymodeLocList.current().extend', errors) diff --git a/pymode/rope.py b/pymode/rope.py index c990cc16..a36d8318 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -1,7 +1,6 @@ """ Rope support in pymode. """ from __future__ import absolute_import, print_function -import json import multiprocessing import os.path import re @@ -14,34 +13,8 @@ from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa from rope.refactor import ModuleToPackage, ImportOrganizer, rename, extract, inline, usefunction, move, change_signature, importutils # noqa -import vim # noqa -from .utils import ( - pymode_message, pymode_error, pymode_input, pymode_inputlist, - pymode_confirm, catch_and_print_exceptions, debug) - -def get_assist_params(cursor=None, base=''): - """ Prepare source and get offset. - - :return source, offset: - - """ - if cursor is None: - cursor = vim.current.window.cursor - - row, column = cursor - source = "" - offset = 0 - - for i, line in enumerate(vim.current.buffer, 1): - if i == row: - source += line[:column] + base - offset = len(source) - source += line[column:] - else: - source += line - source += '\n' - return source, offset +from .environment import env def look_ropeproject(path): @@ -63,35 +36,39 @@ def look_ropeproject(path): p = new_p -@catch_and_print_exceptions +@env.catch_exceptions def completions(): - """ Search completions. """ + """ Search completions. 
+ + :return None: - row, column = vim.current.window.cursor - if vim.eval('a:findstart') == '1': + """ + + row, col = env.cursor + if env.var('a:findstart', True): count = 0 - for char in reversed(vim.current.line[:column]): + for char in reversed(env.current.line[:col]): if not re.match(r'[\w\d]', char): break count += 1 - vim.command('return %i' % (column - count)) - return + env.debug('Complete find start', (col - count)) + return env.stop(col - count) - base = vim.eval('a:base') - source, offset = get_assist_params((row, column), base) + base = env.var('a:base') + source, offset = env.get_offset_params((row, col), base) proposals = get_proporsals(source, offset, base) - vim.command("return %s" % json.dumps(proposals)) + return env.stop(proposals) -@catch_and_print_exceptions +@env.catch_exceptions def complete(dot=False): """ Ctrl+Space completion. :return bool: success """ - row, column = vim.current.window.cursor - source, offset = get_assist_params((row, column)) + row, col = env.cursor + source, offset = env.get_offset_params() proposals = get_proporsals(source, offset, dot=dot) if not proposals: return False @@ -106,12 +83,10 @@ def complete(dot=False): prefix = prefix[:common] s_offset = codeassist.starting_offset(source, offset) p_prefix = prefix[offset - s_offset:] - line = vim.current.buffer[row - 1] - vim.current.buffer[row - 1] = line[:column] + p_prefix + line[column:] # noqa - vim.current.window.cursor = (row, column + len(p_prefix)) - vim.command('call complete(%s, %s)' % ( - column - len(prefix) + len(p_prefix) + 1, json.dumps(proposals))) - + line = env.lines[row - 1] + env.curbuf[row - 1] = line[:col] + p_prefix + line[col:] # noqa + env.current.window.cursor = (row, col + len(p_prefix)) + env.run('complete', col - len(prefix) + len(p_prefix) + 1, proposals) return True @@ -149,50 +124,46 @@ def get_proporsals(source, offset, base='', dot=False): return out -@catch_and_print_exceptions +@env.catch_exceptions def goto(): """ Goto definition. 
""" with RopeContext() as ctx: - source, offset = get_assist_params() + source, offset = env.get_offset_params() found_resource, line = codeassist.get_definition_location( ctx.project, source, offset, ctx.resource, maxfixes=3) if not found_resource: - pymode_error('Definition not found') + env.error('Definition not found') return - if not os.path.abspath(found_resource.path) == vim.current.buffer.name: - vim.command("%s +%s %s" % ( - ctx.options.get('goto_definition_cmd'), - line, found_resource.path)) - - else: - vim.command('normal %sggzz' % line) + env.goto_file( + found_resource.path, cmd=ctx.options.get('goto_definition_cmd')) + env.goto_line(line) -@catch_and_print_exceptions +@env.catch_exceptions def show_doc(): """ Show documentation. """ with RopeContext() as ctx: - source, offset = get_assist_params() + source, offset = env.get_offset_params() try: doc = codeassist.get_doc( ctx.project, source, offset, ctx.resource, maxfixes=3) if not doc: raise exceptions.BadIdentifierError - vim.command('let l:output = %s' % json.dumps(doc.split('\n'))) + env.let('l:output', doc.split('\n')) except exceptions.BadIdentifierError: - pymode_error("No documentation found.") + env.error("No documentation found.") def find_it(): """ Find occurrences. """ with RopeContext() as ctx: - _, offset = get_assist_params() + _, offset = env.get_offset_params() try: occurrences = findit.find_occurrences( ctx.project, ctx.resource, offset) @@ -203,10 +174,10 @@ def find_it(): for oc in occurrences: lst.append(dict( filename=oc.resource.path, - text=ctx.current[oc.lineno - 1], + text=env.lines[oc.lineno - 1] if oc.resource.real_path == env.curbuf.name else "", # noqa lnum=oc.lineno, )) - vim.command('let l:output = %s' % json.dumps(lst)) + env.let('loclist._loclist', lst) def update_python_path(paths): @@ -239,7 +210,7 @@ def organize_imports(): reload_changes(changes) -@catch_and_print_exceptions +@env.catch_exceptions def regenerate(): """ Clear cache. 
""" with RopeContext() as ctx: @@ -250,10 +221,10 @@ def regenerate(): def new(): """ Create a new project. """ - root = vim.eval('input("Enter project root: ", getcwd())') + root = env.var('input("Enter project root: ", getcwd())') prj = project.Project(projectroot=root) prj.close() - pymode_message("Project is opened: %s" % root) + env.message("Project is opened: %s" % root) def undo(): @@ -266,10 +237,10 @@ def undo(): with RopeContext() as ctx: changes = ctx.project.history.tobe_undone if changes is None: - pymode_error('Nothing to undo!') + env.error('Nothing to undo!') return False - if pymode_confirm(yes=False, msg='Undo [%s]?' % str(changes)): + if env.user_confirm('Undo [%s]?' % str(changes)): progress = ProgressHandler('Undo %s' % str(changes)) for c in ctx.project.history.undo(task_handle=progress.handle): reload_changes(c) @@ -285,10 +256,10 @@ def redo(): with RopeContext() as ctx: changes = ctx.project.history.tobe_redone if changes is None: - pymode_error('Nothing to redo!') + env.error('Nothing to redo!') return False - if pymode_confirm(yes=False, msg='Redo [%s]?' % str(changes)): + if env.user_confirm('Redo [%s]?' 
% str(changes)): progress = ProgressHandler('Redo %s' % str(changes)) for c in ctx.project.history.redo(task_handle=progress.handle): reload_changes(c) @@ -304,12 +275,12 @@ def cache_project(cls): resources = dict() def get_ctx(*args, **kwargs): - path = vim.current.buffer.name + path = env.curbuf.name if resources.get(path): return resources.get(path) - project_path = os.path.dirname(vim.eval('getcwd()')) - if int(vim.eval('g:pymode_rope_lookup_project')): + project_path = os.path.dirname(env.curdir) + if env.var('g:pymode_rope_lookup_project', True): project_path = look_ropeproject(project_path) ctx = projects.get(project_path) @@ -326,9 +297,9 @@ def autoimport(): :return bool: """ - word = vim.eval('a:word') + word = env.var('a:word') if not word: - pymode_error("Should be word under cursor.") + env.error("Should be word under cursor.") return False with RopeContext() as ctx: @@ -336,14 +307,14 @@ def autoimport(): ctx.generate_autoimport_cache() modules = ctx.importer.get_modules(word) if not modules: - pymode_message('Global name %s not found.' % word) + env.message('Global name %s not found.' 
% word) return False if len(modules) == 1: _insert_import(word, modules[0], ctx) else: - module = pymode_inputlist('Wich module to import:', modules) + module = env.user_input_choices('Wich module to import:', *modules) _insert_import(word, module, ctx) return True @@ -357,7 +328,6 @@ class RopeContext(object): def __init__(self, path, project_path): self.path = path - debug('Init rope context %s' % self.path) self.project = project.Project( project_path, fscommands=FileSystemCommands()) @@ -370,10 +340,10 @@ def __init__(self, path, project_path): self.resource = None self.current = None self.options = dict( - completeopt=vim.eval('&completeopt'), - autoimport=int(vim.eval('g:pymode_rope_autoimport')), - autoimport_modules=vim.eval('g:pymode_rope_autoimport_modules'), - goto_definition_cmd=vim.eval('g:pymode_rope_goto_definition_cmd'), + completeopt=env.var('&completeopt'), + autoimport=env.var('g:pymode_rope_autoimport', True), + autoimport_modules=env.var('g:pymode_rope_autoimport_modules'), + goto_definition_cmd=env.var('g:pymode_rope_goto_definition_cmd'), ) if os.path.exists("%s/__init__.py" % project_path): @@ -382,20 +352,18 @@ def __init__(self, path, project_path): if self.options.get('autoimport') == '1': self.generate_autoimport_cache() - debug('Context inited %s' % project_path) + env.debug('Context init', project_path) def __enter__(self): self.project.validate(self.project.root) - self.options['encoding'] = vim.eval('&encoding') - self.current = vim.current.buffer self.resource = libutils.path_to_resource( - self.project, self.current.name, 'file') + self.project, env.curbuf.name, 'file') if not self.resource.exists() or os.path.isdir( self.resource.real_path): self.resource = None else: - debug('Found resource "%s"' % self.resource.path) + env.debug('Found resource', self.resource.path) return self @@ -406,7 +374,7 @@ def __exit__(self, t, value, traceback): def generate_autoimport_cache(self): """ Update autoimport cache. 
""" - pymode_message('Regenerate autoimport cache.') + env.message('Regenerate autoimport cache.') modules = self.options.get('autoimport_modules', []) def _update_cache(importer, modules=None): @@ -432,7 +400,7 @@ def __init__(self, msg): def __call__(self): """ Show current progress. """ percent_done = self.handle.current_jobset().get_percent_done() - pymode_message('%s - done %s%%' % (self.message, percent_done)) + env.message('%s - done %s%%' % (self.message, percent_done)) _scope_weight = { @@ -458,11 +426,11 @@ def run(self): with RopeContext() as ctx: if not ctx.resource: - pymode_error("You should save the file before refactoring.") + env.error("You should save the file before refactoring.") return None try: - pymode_message(self.__doc__) + env.message(self.__doc__) refactor = self.get_refactor(ctx) input_str = self.get_input_str(refactor, ctx) if not input_str: @@ -470,8 +438,8 @@ def run(self): changes = self.get_changes(refactor, input_str) - action = pymode_inputlist( - 'Choose what to do:', ['perform', 'preview']) + action = env.user_input_choices( + 'Choose what to do:', 'perform', 'preview') if not action: return False @@ -481,17 +449,17 @@ def run(self): print("-------------------------------") print("\n%s\n" % changes.get_description()) print("-------------------------------\n\n") - if not pymode_confirm(False): + if not env.user_confirm('Do the changes?'): return False progress = ProgressHandler('Apply changes ...') ctx.project.do(changes, task_handle=progress.handle) reload_changes(changes) except exceptions.RefactoringError as e: - pymode_error(str(e)) + env.error(str(e)) except Exception as e: - pymode_error('Unhandled exception in Pymode: %s' % e) + env.error('Unhandled exception in Pymode: %s' % e) @staticmethod def get_refactor(ctx): @@ -537,7 +505,8 @@ def get_refactor(self, ctx): """ offset = None if not self.module: - _, offset = get_assist_params() + _, offset = env.get_offset_params() + env.debug('Prepare rename', offset) return 
rename.Rename(ctx.project, ctx.resource, offset) def get_input_str(self, refactor, ctx): @@ -547,10 +516,10 @@ def get_input_str(self, refactor, ctx): msg = 'Renaming method/variable. New name:' if self.module: msg = 'Renaming module. New name:' - newname = pymode_input(msg, oldname) + newname = env.user_input(msg, oldname) if newname == oldname: - pymode_message("Nothing to do.") + env.message("Nothing to do.") return False return newname @@ -564,7 +533,7 @@ class ExtractMethodRefactoring(Refactoring): def get_input_str(refactor, ctx): """ Return user input. """ - return pymode_input('New method name:') + return env.user_input('New method name:') @staticmethod def get_refactor(ctx): @@ -573,10 +542,9 @@ def get_refactor(ctx): :return Rename: """ - buf = vim.current.buffer - cursor1, cursor2 = buf.mark('<'), buf.mark('>') - _, offset1 = get_assist_params(cursor1) - _, offset2 = get_assist_params(cursor2) + cursor1, cursor2 = env.curbuf.mark('<'), env.curbuf.mark('>') + _, offset1 = env.get_offset_params(cursor1) + _, offset2 = env.get_offset_params(cursor2) return extract.ExtractMethod( ctx.project, ctx.resource, offset1, offset2) @@ -599,7 +567,7 @@ class ExtractVariableRefactoring(Refactoring): def get_input_str(refactor, ctx): """ Return user input. 
""" - return pymode_input('New variable name:') + return env.user_input('New variable name:') @staticmethod def get_refactor(ctx): @@ -608,10 +576,9 @@ def get_refactor(ctx): :return Rename: """ - buf = vim.current.buffer - cursor1, cursor2 = buf.mark('<'), buf.mark('>') - _, offset1 = get_assist_params(cursor1) - _, offset2 = get_assist_params(cursor2) + cursor1, cursor2 = env.curbuf.mark('<'), env.curbuf.mark('>') + _, offset1 = env.get_offset_params(cursor1) + _, offset2 = env.get_offset_params(cursor2) return extract.ExtractVariable( ctx.project, ctx.resource, offset1, offset2) @@ -637,7 +604,7 @@ def get_refactor(ctx): :return Rename: """ - _, offset = get_assist_params() + _, offset = env.get_offset_params() return inline.create_inline(ctx.project, ctx.resource, offset) @staticmethod @@ -662,7 +629,7 @@ def get_refactor(ctx): :return Rename: """ - _, offset = get_assist_params() + _, offset = env.get_offset_params() return usefunction.UseFunction(ctx.project, ctx.resource, offset) @staticmethod @@ -712,7 +679,7 @@ def get_input_str(refactor, ctx): """ - return pymode_input('Enter destination:') + return env.user_input('Enter destination:') @staticmethod def get_refactor(ctx): @@ -721,7 +688,7 @@ def get_refactor(ctx): :return Rename: """ - _, offset = get_assist_params() + _, offset = env.get_offset_params() if offset == 0: offset = None return move.create_move(ctx.project, ctx.resource, offset) @@ -740,7 +707,7 @@ def get_input_str(refactor, ctx): """ args = refactor.get_args() default = ', '.join(a[0] for a in args) - return pymode_input('Change the signature:', udefault=default) + return env.user_input('Change the signature:', default) @staticmethod def get_refactor(ctx): @@ -749,7 +716,7 @@ def get_refactor(ctx): :return Rename: """ - _, offset = get_assist_params() + _, offset = env.get_offset_params() return change_signature.ChangeSignature( ctx.project, ctx.resource, offset) @@ -796,7 +763,7 @@ def get_refactor(self, ctx): :return Rename: """ - _, 
offset = get_assist_params() + _, offset = env.get_offset_params() return generate.create_generate( self.kind, ctx.project, ctx.resource, offset) @@ -810,30 +777,27 @@ def get_changes(self, refactor, input_str): return refactor.get_changes() +@env.catch_exceptions def reload_changes(changes): """ Reload changed buffers. """ resources = changes.get_changed_resources() moved = _get_moved_resources(changes) # noqa - current = vim.current.buffer.number + current = env.curbuf.number for f in resources: - try: - bufnr = vim.eval('bufnr("%s")' % f.real_path) - if bufnr == '-1': - continue - vim.command('buffer %s' % bufnr) + bufnr = env.var('bufnr("%s")' % f.real_path) + env.goto_buffer(bufnr) - if f in moved: - vim.command('e! %s' % moved[f].real_path) - else: - vim.command('e!') + path = env.curbuf.name + if f in moved: + path = moved[f].real_path - vim.command('echom "%s has been changed."' % f.real_path) + env.debug('Reload', f.real_path, path, bufnr) + env.goto_file(path, 'e!', force=True) + env.message("%s has been changed." % f.real_path, history=True) - except vim.error: - continue - vim.command('buffer %s' % current) + env.goto_buffer(current) def _get_moved_resources(changes): @@ -878,7 +842,7 @@ def _get_autoimport_proposals(out, ctx, source, offset, dot=False): return out -@catch_and_print_exceptions +@env.catch_exceptions def complete_check(): """ Function description. @@ -886,8 +850,8 @@ def complete_check(): """ - row, column = vim.current.window.cursor - line = vim.current.buffer[row - 1] + row, column = env.cursor + line = env.lines[row - 1] word_finder = worder.Worder(line, True) parent, name, _ = word_finder.get_splitted_primary_before(column - 1) if parent: @@ -902,27 +866,24 @@ def complete_check(): if name in ctx.project.pycore.resource_to_pyobject(ctx.resource): return False - if not pymode_confirm(True, "Import %s?" % name): + if not env.user_confirm("Import %s?" 
% name, True): return False if len(modules) == 1: _insert_import(name, modules[0], ctx) else: - module = pymode_inputlist('With module to import:', modules) + module = env.user_input_choices('With module to import:', *modules) if module: _insert_import(name, module, ctx) - vim.command('call pymode#save()') - regenerate() - def _insert_import(name, module, ctx): if not ctx.resource: - source, _ = get_assist_params() + source, _ = env.get_offset_params() lineno = ctx.importer.find_insertion_line(source) line = 'from %s import %s' % (module, name) - vim.current.buffer[lineno - 1:lineno - 1] = [line] + env.curbuf[lineno - 1:lineno - 1] = [line] return True pyobject = ctx.project.pycore.resource_to_pyobject(ctx.resource) @@ -933,8 +894,8 @@ def _insert_import(name, module, ctx): changes = change.ChangeContents( ctx.resource, module_imports.get_changed_source()) - action = pymode_inputlist( - 'Choose what to do:', ['perform', 'preview']) + action = env.user_input_choices( + 'Choose what to do:', 'perform', 'preview') if not action: return False @@ -944,7 +905,7 @@ def _insert_import(name, module, ctx): print("-------------------------------") print("\n%s\n" % changes.get_description()) print("-------------------------------\n\n") - if not pymode_confirm(False): + if not env.user_confirm('Do the changes?'): return False progress = ProgressHandler('Apply changes ...') diff --git a/pymode/run.py b/pymode/run.py index 344260ae..f49b8714 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -5,31 +5,26 @@ except ImportError: from io import StringIO -import json import sys -import vim # noqa -from .utils import pymode_error - - -VIM_INPUT = lambda s: vim.eval('input("%s")' % s) +from .environment import env def run_code(): """ Run python code in current buffer. 
""" errors = [] - line1, line2 = vim.eval('a:line1'), vim.eval('a:line2') + line1, line2 = env.var('a:line1'), env.var('a:line2') lines = __prepare_lines(line1, line2) - context = dict(__name__='__main__', input=VIM_INPUT, raw_input=VIM_INPUT) + context = dict( + __name__='__main__', input=env.user_input, raw_input=env.user_input) sys.stdout, stdout_ = StringIO(), sys.stdout sys.stderr, stderr_ = StringIO(), sys.stderr try: - code = compile( - '\n'.join(lines) + '\n', vim.current.buffer.name, 'exec') + code = compile('\n'.join(lines) + '\n', env.curbuf.name, 'exec') exec(code, context) # noqa except SystemExit as e: @@ -37,8 +32,8 @@ def run_code(): # A non-false code indicates abnormal termination. # A false code will be treated as a # successful run, and the error will be hidden from Vim - pymode_error("Script exited with code %s" % e.code) - vim.command('return') + env.error("Script exited with code %s" % e.code) + env.stop() except Exception: import traceback @@ -51,20 +46,19 @@ def run_code(): sys.stdout, sys.stderr = stdout_, stderr_ try: - output = output.decode('utf-8').encode(vim.eval('&enc')) + output = output.decode('utf-8').encode(env.options.get('encoding')) except AttributeError: pass errors += [er for er in err.splitlines() if er and "" not in er] - vim.command('let l:traceback = %s' % json.dumps(errors[2:])) - vim.command('let l:output = %s' % json.dumps( - [s for s in output.split('\n') if s])) + env.let('l:traceback', errors[2:]) + env.let('l:output', [s for s in output.split('\n') if s]) def __prepare_lines(line1, line2): - lines = [l.rstrip() for l in vim.current.buffer[int(line1) - 1:int(line2)]] + lines = [l.rstrip() for l in env.lines[int(line1) - 1:int(line2)]] indent = 0 for line in lines: diff --git a/pymode/utils.py b/pymode/utils.py index 101b5b7a..80c59da2 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -18,18 +18,6 @@ PY2 = sys.version_info[0] == 2 -def args_from_vim(func): - """ Proxy arguments from Vim function to Python 
function. - - :return func: A wrapper - - """ - def __wrapper(): - args = vim.eval('a:000') - return func(*args) - return __wrapper - - def pymode_message(content): """ Show message. """ @@ -99,26 +87,6 @@ def pymode_error(content): vim.command('call pymode#error("%s")' % str(content)) -def with_metaclass(meta, *bases): - """ Metaclass support. - - :return class: - - """ - - class metaclass(meta): - - __call__ = type.__call__ - __init__ = type.__init__ - - def __new__(cls, name, this_bases, d): - if this_bases is None: - return type.__new__(cls, name, (), d) - return meta(name, bases, d) - - return metaclass('temporary_class', None, {}) - - def catch_and_print_exceptions(func): """ Catch any exception. diff --git a/pymode/virtualenv.py b/pymode/virtualenv.py index d6fb8228..87734f54 100644 --- a/pymode/virtualenv.py +++ b/pymode/virtualenv.py @@ -1,12 +1,11 @@ """ Support virtualenv in pymode. """ import os.path -import vim # noqa -from .utils import pymode_message, catch_and_print_exceptions +from .environment import env -@catch_and_print_exceptions +@env.catch_exceptions def enable_virtualenv(): """ Enable virtualenv for vim. @@ -14,11 +13,11 @@ def enable_virtualenv(): """ - path = vim.eval('g:pymode_virtualenv_path') - enabled = vim.eval('g:pymode_virtualenv_enabled') + path = env.var('g:pymode_virtualenv_path') + enabled = env.var('g:pymode_virtualenv_enabled') if path == enabled: - pymode_message('Virtualenv %s already enabled.' % path) - return False + env.message('Virtualenv %s already enabled.' 
% path) + return env.stop() activate_this = os.path.join(os.path.join(path, 'bin'), 'activate_this.py') @@ -32,8 +31,8 @@ def enable_virtualenv(): source = f.read() exec(compile( # noqa source, activate_this, 'exec'), dict(__file__=activate_this)) - pymode_message('Activate virtualenv: ' + path) - vim.command('let g:pymode_virtualenv_enabled = "%s"' % path) + env.message('Activate virtualenv: ' + path) + env.let('g:pymode_virtualenv_enabled', path) return True finally: f.close() From efe4bb396707c1f1ce3a82c9beab5f447088fb35 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 04:27:53 +0700 Subject: [PATCH 027/428] Update autogroup --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 56b8fd9c..13d45ee1 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-02 0.7.3b +## 2013-12-02 0.7.4b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index a463f1b9..0c60ea4a 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.3b + Version: 0.7.4b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 201601b8..de88967e 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.3b" +let g:pymode_version = "0.7.4b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! 
PymodeTroubleshooting call pymode#troubleshooting#test() From 5a34c00f6627bafe4b20e7141c0d55eeaccbf65c Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 04:30:36 +0700 Subject: [PATCH 028/428] Add pylama.ini --- pylama.ini | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 pylama.ini diff --git a/pylama.ini b/pylama.ini new file mode 100644 index 00000000..8ccac54e --- /dev/null +++ b/pylama.ini @@ -0,0 +1,2 @@ +[main] +ignore = R0201,R0922,C0111,E1103 From 64cafbea2087fa4613140bd93708c91aaf23d580 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 13:29:15 +0700 Subject: [PATCH 029/428] Code cleanup --- pymode/run.py | 8 ++--- pymode/utils.py | 96 ------------------------------------------------- 2 files changed, 2 insertions(+), 102 deletions(-) diff --git a/pymode/run.py b/pymode/run.py index f49b8714..3ad61117 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -33,7 +33,7 @@ def run_code(): # A false code will be treated as a # successful run, and the error will be hidden from Vim env.error("Script exited with code %s" % e.code) - env.stop() + return env.stop() except Exception: import traceback @@ -43,13 +43,9 @@ def run_code(): err = sys.stderr.getvalue() output = sys.stdout.getvalue().strip() + output = env.prepare_value(output) sys.stdout, sys.stderr = stdout_, stderr_ - try: - output = output.decode('utf-8').encode(env.options.get('encoding')) - except AttributeError: - pass - errors += [er for er in err.splitlines() if er and "" not in er] env.let('l:traceback', errors[2:]) diff --git a/pymode/utils.py b/pymode/utils.py index 80c59da2..c430abbf 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -1,5 +1,4 @@ """ Pymode utils. """ -import json import os.path import sys import threading @@ -18,92 +17,6 @@ PY2 = sys.version_info[0] == 2 -def pymode_message(content): - """ Show message. 
""" - - vim.command('call pymode#wide_message("%s")' % str(content)) - - -def pymode_confirm(yes=True, msg='Do the changes:'): - """ Confirmation. - - :return bool: - - """ - default = 'yes' if yes else 'no' - action = pymode_input(msg, default) - return action and 'yes'.startswith(action) - - -def pymode_inputlist(msg, opts): - """ Get user choice. - - :return str: A choosen option - - """ - choices = ['[Pymode] %s' % msg] - choices += ["%s. %s" % (num, opt) for num, opt in enumerate(opts, 1)] - try: - input_str = int(vim.eval('inputlist(%s)' % json.dumps(choices))) - except (KeyboardInterrupt, ValueError): - input_str = 0 - - if not input_str: - pymode_message('Cancelled!') - return False - - try: - return opts[input_str - 1] - except (IndexError, ValueError): - pymode_error('Invalid option: %s' % input_str) - return pymode_inputlist(msg, opts) - - -def pymode_input(umsg, udefault='', opts=None): - """ Get user input. - - :return str: A user input - - """ - msg = '[Pymode] %s ' % umsg - default = udefault - - if default != '': - msg += '[%s] ' % default - - try: - vim.command('echohl Debug') - input_str = vim.eval('input("%s> ")' % msg) - vim.command('echohl none') - except KeyboardInterrupt: - input_str = '' - - return input_str or default - - -def pymode_error(content): - """ Show error. """ - - vim.command('call pymode#error("%s")' % str(content)) - - -def catch_and_print_exceptions(func): - """ Catch any exception. - - :return func: - - """ - def wrapper(*args, **kwargs): - try: - return func(*args, **kwargs) - except (Exception, vim.error) as e: # noqa - if DEBUG: - raise - pymode_error(e) - return None - return wrapper - - @contextmanager def silence_stderr(): """ Redirect stderr. """ @@ -127,12 +40,3 @@ def patch_paths(): sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs2')) else: sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs3')) - - -debug = lambda _: None - -if DEBUG: - def debug(msg): # noqa - """ Debug message. 
""" - - print(msg) From a61f4be70aa280231158a829962215ce180614b8 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 17:18:01 +0700 Subject: [PATCH 030/428] Remove unused pylint settings --- pylint.ini | 25 ------------------------- 1 file changed, 25 deletions(-) delete mode 100644 pylint.ini diff --git a/pylint.ini b/pylint.ini deleted file mode 100644 index c58c4d0e..00000000 --- a/pylint.ini +++ /dev/null @@ -1,25 +0,0 @@ -[MESSAGES CONTROL] -# Disable the message(s) with the given id(s). -# http://pylint-messages.wikidot.com/all-codes -# -# C0103: Invalid name "%s" (should match %s) -# C0111: Missing docstring -# E1101: %s %r has no %r member -# R0901: Too many ancestors (%s/%s) -# R0902: Too many instance attributes (%s/%s) -# R0903: Too few public methods (%s/%s) -# R0904: Too many public methods (%s/%s) -# R0913: Too many arguments (%s/%s) -# R0915: Too many statements (%s/%s) -# W0141: Used builtin function %r -# W0142: Used * or ** magic -# W0221: Arguments number differs from %s method -# W0232: Class has no __init__ method -# W0401: Wildcard import %s -# W0613: Unused argument %r -# W0631: Using possibly undefined loop variable %r -# -disable = C0103,C0111,E1101,R0901,R0902,R0903,R0904,R0913,R0915,W0141,W0142,W0221,W0232,W0401,W0613,W0631 - -[TYPECHECK] -generated-members = REQUEST,acl_users,aq_parent,objects,DoesNotExist,_meta,status_code,content,context From 6a36d3d8ead541bb1932d1e7053c0522e5067b5a Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 17:20:41 +0700 Subject: [PATCH 031/428] Support vim-addons --- python-mode.yaml | 268 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 268 insertions(+) create mode 100644 python-mode.yaml diff --git a/python-mode.yaml b/python-mode.yaml new file mode 100644 index 00000000..b5635e68 --- /dev/null +++ b/python-mode.yaml @@ -0,0 +1,268 @@ +addon: python-mode +description: "swissknife for python" +files: + - after/ftplugin/pyrex.vim + - after/ftplugin/python.vim + 
- after/indent/pyrex.vim + - after/indent/python.vim + - autoload/pymode/breakpoint.vim + - autoload/pymode/doc.vim + - autoload/pymode/folding.vim + - autoload/pymode/indent.vim + - autoload/pymode/lint.vim + - autoload/pymode/motion.vim + - autoload/pymode/rope.vim + - autoload/pymode/run.vim + - autoload/pymode/tools/loclist.vim + - autoload/pymode/tools/signs.vim + - autoload/pymode/troubleshooting.vim + - autoload/pymode/virtualenv.vim + - autoload/pymode.vim + - ftplugin/pyrex.vim + - ftplugin/python/pymode.vim + - plugin/pymode.vim + - syntax/pyrex.vim + - syntax/python.vim + - pymode/__init__.py + - pymode/async.py + - pymode/autopep8.py + - pymode/environment.py + - pymode/libs/pylama/__init__.py + - pymode/libs/pylama/config.py + - pymode/libs/pylama/core.py + - pymode/libs/pylama/hook.py + - pymode/libs/pylama/libs/__init__.py + - pymode/libs/pylama/libs/importlib.py + - pymode/libs/pylama/libs/inirama.py + - pymode/libs/pylama/lint/__init__.py + - pymode/libs/pylama/lint/extensions.py + - pymode/libs/pylama/lint/pylama_mccabe/__init__.py + - pymode/libs/pylama/lint/pylama_mccabe/mccabe.py + - pymode/libs/pylama/lint/pylama_pep257/__init__.py + - pymode/libs/pylama/lint/pylama_pep257/pep257.py + - pymode/libs/pylama/lint/pylama_pep8/__init__.py + - pymode/libs/pylama/lint/pylama_pep8/pep8.py + - pymode/libs/pylama/lint/pylama_pyflakes/__init__.py + - pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py + - pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py + - pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py + - pymode/libs/pylama/lint/pylama_pylint/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/main.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py + - 
pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/as_string.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/bases.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/builder.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/exceptions.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/inference.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/manager.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/mixins.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/node_classes.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/nodes.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/protocols.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/raw_building.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/rebuilder.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/scoped_nodes.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/utils.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py + - 
pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/config.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/__pkginfo__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py + - 
pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py + - pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py + - pymode/libs/pylama/main.py + - pymode/libs/pylama/tasks.py + - pymode/libs/pylama/utils.py + - pymode/libs2/rope/__init__.py + - pymode/libs2/rope/base/__init__.py + - pymode/libs2/rope/base/arguments.py + - pymode/libs2/rope/base/ast.py + - pymode/libs2/rope/base/astutils.py + - pymode/libs2/rope/base/builtins.py + - pymode/libs2/rope/base/change.py + - pymode/libs2/rope/base/codeanalyze.py + - pymode/libs2/rope/base/default_config.py + - pymode/libs2/rope/base/evaluate.py + - pymode/libs2/rope/base/exceptions.py + - pymode/libs2/rope/base/fscommands.py + - pymode/libs2/rope/base/history.py + - pymode/libs2/rope/base/libutils.py + - pymode/libs2/rope/base/oi/__init__.py + - pymode/libs2/rope/base/oi/doa.py + - pymode/libs2/rope/base/oi/memorydb.py + - pymode/libs2/rope/base/oi/objectdb.py + - pymode/libs2/rope/base/oi/objectinfo.py + - pymode/libs2/rope/base/oi/runmod.py + - pymode/libs2/rope/base/oi/soa.py + - pymode/libs2/rope/base/oi/soi.py + - pymode/libs2/rope/base/oi/transform.py + - pymode/libs2/rope/base/prefs.py + - pymode/libs2/rope/base/project.py + - pymode/libs2/rope/base/pycore.py + - pymode/libs2/rope/base/pynames.py + - pymode/libs2/rope/base/pynamesdef.py + - pymode/libs2/rope/base/pyobjects.py + - pymode/libs2/rope/base/pyobjectsdef.py + - pymode/libs2/rope/base/pyscopes.py + - pymode/libs2/rope/base/resourceobserver.py + - pymode/libs2/rope/base/resources.py + - pymode/libs2/rope/base/simplify.py + - pymode/libs2/rope/base/stdmods.py + - pymode/libs2/rope/base/taskhandle.py + - pymode/libs2/rope/base/utils.py + - pymode/libs2/rope/base/worder.py + - pymode/libs2/rope/contrib/__init__.py + - 
pymode/libs2/rope/contrib/autoimport.py + - pymode/libs2/rope/contrib/changestack.py + - pymode/libs2/rope/contrib/codeassist.py + - pymode/libs2/rope/contrib/finderrors.py + - pymode/libs2/rope/contrib/findit.py + - pymode/libs2/rope/contrib/fixmodnames.py + - pymode/libs2/rope/contrib/fixsyntax.py + - pymode/libs2/rope/contrib/generate.py + - pymode/libs2/rope/refactor/__init__.py + - pymode/libs2/rope/refactor/change_signature.py + - pymode/libs2/rope/refactor/encapsulate_field.py + - pymode/libs2/rope/refactor/extract.py + - pymode/libs2/rope/refactor/functionutils.py + - pymode/libs2/rope/refactor/importutils/__init__.py + - pymode/libs2/rope/refactor/importutils/actions.py + - pymode/libs2/rope/refactor/importutils/importinfo.py + - pymode/libs2/rope/refactor/importutils/module_imports.py + - pymode/libs2/rope/refactor/inline.py + - pymode/libs2/rope/refactor/introduce_factory.py + - pymode/libs2/rope/refactor/introduce_parameter.py + - pymode/libs2/rope/refactor/localtofield.py + - pymode/libs2/rope/refactor/method_object.py + - pymode/libs2/rope/refactor/move.py + - pymode/libs2/rope/refactor/multiproject.py + - pymode/libs2/rope/refactor/occurrences.py + - pymode/libs2/rope/refactor/patchedast.py + - pymode/libs2/rope/refactor/rename.py + - pymode/libs2/rope/refactor/restructure.py + - pymode/libs2/rope/refactor/similarfinder.py + - pymode/libs2/rope/refactor/sourceutils.py + - pymode/libs2/rope/refactor/suites.py + - pymode/libs2/rope/refactor/topackage.py + - pymode/libs2/rope/refactor/usefunction.py + - pymode/libs2/rope/refactor/wildcards.py + - pymode/libs3/rope/__init__.py + - pymode/libs3/rope/base/__init__.py + - pymode/libs3/rope/base/arguments.py + - pymode/libs3/rope/base/ast.py + - pymode/libs3/rope/base/astutils.py + - pymode/libs3/rope/base/builtins.py + - pymode/libs3/rope/base/change.py + - pymode/libs3/rope/base/codeanalyze.py + - pymode/libs3/rope/base/default_config.py + - pymode/libs3/rope/base/evaluate.py + - 
pymode/libs3/rope/base/exceptions.py + - pymode/libs3/rope/base/fscommands.py + - pymode/libs3/rope/base/history.py + - pymode/libs3/rope/base/libutils.py + - pymode/libs3/rope/base/oi/__init__.py + - pymode/libs3/rope/base/oi/doa.py + - pymode/libs3/rope/base/oi/memorydb.py + - pymode/libs3/rope/base/oi/objectdb.py + - pymode/libs3/rope/base/oi/objectinfo.py + - pymode/libs3/rope/base/oi/runmod.py + - pymode/libs3/rope/base/oi/soa.py + - pymode/libs3/rope/base/oi/soi.py + - pymode/libs3/rope/base/oi/transform.py + - pymode/libs3/rope/base/prefs.py + - pymode/libs3/rope/base/project.py + - pymode/libs3/rope/base/pycore.py + - pymode/libs3/rope/base/pynames.py + - pymode/libs3/rope/base/pynamesdef.py + - pymode/libs3/rope/base/pyobjects.py + - pymode/libs3/rope/base/pyobjectsdef.py + - pymode/libs3/rope/base/pyscopes.py + - pymode/libs3/rope/base/resourceobserver.py + - pymode/libs3/rope/base/resources.py + - pymode/libs3/rope/base/simplify.py + - pymode/libs3/rope/base/stdmods.py + - pymode/libs3/rope/base/taskhandle.py + - pymode/libs3/rope/base/utils.py + - pymode/libs3/rope/base/worder.py + - pymode/libs3/rope/contrib/__init__.py + - pymode/libs3/rope/contrib/autoimport.py + - pymode/libs3/rope/contrib/changestack.py + - pymode/libs3/rope/contrib/codeassist.py + - pymode/libs3/rope/contrib/finderrors.py + - pymode/libs3/rope/contrib/findit.py + - pymode/libs3/rope/contrib/fixmodnames.py + - pymode/libs3/rope/contrib/fixsyntax.py + - pymode/libs3/rope/contrib/generate.py + - pymode/libs3/rope/refactor/__init__.py + - pymode/libs3/rope/refactor/change_signature.py + - pymode/libs3/rope/refactor/encapsulate_field.py + - pymode/libs3/rope/refactor/extract.py + - pymode/libs3/rope/refactor/functionutils.py + - pymode/libs3/rope/refactor/importutils/__init__.py + - pymode/libs3/rope/refactor/importutils/actions.py + - pymode/libs3/rope/refactor/importutils/importinfo.py + - pymode/libs3/rope/refactor/importutils/module_imports.py + - 
pymode/libs3/rope/refactor/inline.py + - pymode/libs3/rope/refactor/introduce_factory.py + - pymode/libs3/rope/refactor/introduce_parameter.py + - pymode/libs3/rope/refactor/localtofield.py + - pymode/libs3/rope/refactor/method_object.py + - pymode/libs3/rope/refactor/move.py + - pymode/libs3/rope/refactor/multiproject.py + - pymode/libs3/rope/refactor/occurrences.py + - pymode/libs3/rope/refactor/patchedast.py + - pymode/libs3/rope/refactor/rename.py + - pymode/libs3/rope/refactor/restructure.py + - pymode/libs3/rope/refactor/similarfinder.py + - pymode/libs3/rope/refactor/sourceutils.py + - pymode/libs3/rope/refactor/suites.py + - pymode/libs3/rope/refactor/topackage.py + - pymode/libs3/rope/refactor/usefunction.py + - pymode/libs3/rope/refactor/wildcards.py + - pymode/lint.py + - pymode/rope.py + - pymode/run.py + - pymode/utils.py + - pymode/virtualenv.py + - pymode/libs/pylama/lint/pylama_pylint/pylint.rc From 05dad382b609d47b59582463707d4f50fd1c94c9 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 17:20:51 +0700 Subject: [PATCH 032/428] Ignore build dir --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 3832bf8a..f5674a78 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ Gemfile.lock VimFlavor.lock _ +build tags test.py todo.txt From c0303ed0b123032a5e50d99330e24d1d7d6827e3 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 18:54:31 +0700 Subject: [PATCH 033/428] Update Makefile --- Makefile | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index e30f836d..726539d1 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,9 @@ PYLAMA = $(LIBS)/pylama .PHONY: clean clean: - find . 
-name "*.pyc" -delete + find $(CURDIR) -name "*.pyc" -delete + rm -rf $(CURDIR)/build + rm -rf *.deb # Temporary disable rope tests on Travis .PHONY: travis @@ -29,3 +31,37 @@ $(PYLAMA): $(PYLAMA)/lint/pylama_pylint: cp -r ~/Dropbox/projects/pylama/plugins/pylama_pylint/pylama_pylint/ $(PYLAMA)/lint/pylama_pylint + +$(CURDIR)/build: + mkdir -p $(CURDIR)/build/usr/share/vim/addons + mkdir -p $(CURDIR)/build/usr/share/vim/registry + cp -r after autoload doc ftplugin plugin pymode syntax $(CURDIR)/build/usr/share/vim/addons/. + cp -r python-mode.yaml $(CURDIR)/build/usr/share/vim/registry/. + +TARGET?=$(CURDIR)/deb +PACKAGE_VERSION?=$(shell git describe --tags `git rev-list master --tags --max-count=1`) +PACKAGE_NAME="vim-python-mode" +PACKAGE_MAINTAINER="Kirill Klenov " +PACKAGE_URL=http://github.com/klen/python-mode +deb: clean $(CURDIR)/build + @git co gh-pages + @fpm -s dir -t deb -a all \ + -n $(PACKAGE_NAME) \ + -v $(PACKAGE_VERSION) \ + -m $(PACKAGE_MAINTAINER) \ + --url $(PACKAGE_URL) \ + --license "GNU lesser general public license" \ + --description "Vim-Swissknife for python" \ + --deb-user root \ + --deb-group root \ + -C $(CURDIR)/build \ + -d "python2.7" \ + -d "vim-addon-manager" \ + usr + @mkdir -p $(TARGET) + @prm --type deb --path $(TARGET) \ + --release precise \ + --arch amd64,i386,all \ + --component main \ + --directory $(CURDIR) \ + --gpg horneds@gmail.com From a5b64be159ca4edccbf138045e2bf0e9d923fd27 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 21:35:11 +0700 Subject: [PATCH 034/428] Update releases --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 726539d1..855a46ca 100644 --- a/Makefile +++ b/Makefile @@ -60,7 +60,7 @@ deb: clean $(CURDIR)/build usr @mkdir -p $(TARGET) @prm --type deb --path $(TARGET) \ - --release precise \ + --release precise,quantal,raring,saucy \ --arch amd64,i386,all \ --component main \ --directory $(CURDIR) \ From 
30921a5d14d7023cf4df476b15743f5e8aa66ebf Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 21:46:29 +0700 Subject: [PATCH 035/428] Add information about repository --- README.rst | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/README.rst b/README.rst index 62cba9a8..0d364b25 100644 --- a/README.rst +++ b/README.rst @@ -95,6 +95,28 @@ Then rebuild **helptags** in vim:: must be enabled to use python-mode. +Debian packages +--------------- + +Repository URL: http://klen.github.io/python-mode/deb/ +Install with commands: + +:: + + add-apt-repository http://klen.github.io/python-mode/deb main + apt-get update + apt-get install vim-python-mode + +If you are getting the message: "The following signatures couldn' be verified because the public key is not available": :: + + apt-key adv --keyserver keyserver.ubuntu.com --recv-keys B5DF65307000E266 + +`vim-python-mode` using `vim-addons`, so after installation just enable +`python-mode` with command: :: + + vim-addons install python-mode + + Troubleshooting =============== From eb4071aeb6692f1b72e2500d2ca25bd357d52359 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 3 Dec 2013 21:48:26 +0700 Subject: [PATCH 036/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 13d45ee1..4db0abb0 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-02 0.7.4b +## 2013-12-02 0.7.5b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index 0c60ea4a..7719eadf 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.4b + Version: 0.7.5b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim 
b/plugin/pymode.vim index de88967e..265a15a2 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.4b" +let g:pymode_version = "0.7.5b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! PymodeTroubleshooting call pymode#troubleshooting#test() From 78f934996f18eada435f8af926c4f5e1a3313c54 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 4 Dec 2013 12:49:54 +0700 Subject: [PATCH 037/428] More logging in debug mode. Fix Rope project initilization --- plugin/pymode.vim | 3 +++ pymode/libs/pylama/__init__.py | 2 +- pymode/libs/pylama/config.py | 2 +- pymode/libs/pylama/core.py | 8 ++++++-- pymode/lint.py | 4 ++++ pymode/rope.py | 8 ++++++-- pymode/utils.py | 16 ++++++++++------ t/rope.vim | 5 ++++- 8 files changed, 35 insertions(+), 13 deletions(-) diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 265a15a2..7eb98a82 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -147,6 +147,9 @@ call pymode#default('g:pymode_breakpoint_cmd', '') " Rope support call pymode#default('g:pymode_rope', 1) +" System plugin variable +call pymode#default('g:pymode_rope_current', '') + " If project hasnt been finded in current working directory, look at parents directory call pymode#default('g:pymode_rope_lookup_project', 1) diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index c5a5acfb..71ae8c59 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -version_info = 2, 0, 3 +version_info = 2, 0, 4 __version__ = version = '.'.join(map(str, version_info)) __project__ = __name__ diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index d706b68c..408fdcb2 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -128,7 +128,7 @@ def parse_linters(csp_str): if linter: result.append((name, linter)) else: - logging.warn("Linter `%s` not found." 
% name) + logging.warn("Linter `%s` not found.", name) return result parser.add_argument( diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index fa65e4ba..711fd20c 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -5,6 +5,7 @@ """ import logging import re +import sys from .lint.extensions import LINTERS #: The skip pattern @@ -16,7 +17,7 @@ # Setup a logger LOGGER = logging.getLogger('pylama') -STREAM = logging.StreamHandler() +STREAM = logging.StreamHandler(sys.stdout) LOGGER.addHandler(STREAM) @@ -175,6 +176,9 @@ def __enter__(self): self.code = self._file.read() + '\n\n' return self - def __exit__(self): + def __exit__(self, t, value, traceback): if not self._file is None: self._file.close() + + if t and LOGGER.level == logging.DEBUG: + LOGGER.debug(traceback) diff --git a/pymode/lint.py b/pymode/lint.py index d7c39dbe..bd75c817 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -31,6 +31,10 @@ def code_check(): env.stop() return False + if env.options.get('debug'): + from pylama.core import LOGGER, logging + LOGGER.setLevel(logging.DEBUG) + with silence_stderr(): errors = check_path(path, options=options, code=env.source) diff --git a/pymode/rope.py b/pymode/rope.py index a36d8318..dbf24309 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -23,6 +23,7 @@ def look_ropeproject(path): :return str|None: A finded path """ + env.debug('Look project', path) p = os.path.abspath(path) while True: @@ -31,7 +32,7 @@ def look_ropeproject(path): new_p = os.path.abspath(os.path.join(p, "..")) if new_p == p: - return '.' 
+ return path p = new_p @@ -279,7 +280,8 @@ def get_ctx(*args, **kwargs): if resources.get(path): return resources.get(path) - project_path = os.path.dirname(env.curdir) + project_path = env.curdir + env.debug('Look ctx', project_path) if env.var('g:pymode_rope_lookup_project', True): project_path = look_ropeproject(project_path) @@ -353,8 +355,10 @@ def __init__(self, path, project_path): self.generate_autoimport_cache() env.debug('Context init', project_path) + env.message('Init Rope project: %s' % project_path) def __enter__(self): + env.let('g:pymode_rope_current', self.project.root.real_path) self.project.validate(self.project.root) self.resource = libutils.path_to_resource( self.project, env.curbuf.name, 'file') diff --git a/pymode/utils.py b/pymode/utils.py index c430abbf..281e0152 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -21,14 +21,18 @@ def silence_stderr(): """ Redirect stderr. """ - with threading.Lock(): - stderr = sys.stderr - sys.stderr = StringIO() + if DEBUG: + yield - yield + else: + with threading.Lock(): + stderr = sys.stderr + sys.stderr = StringIO() + + yield - with threading.Lock(): - sys.stderr = stderr + with threading.Lock(): + sys.stderr = stderr def patch_paths(): diff --git a/t/rope.vim b/t/rope.vim index 8656aa89..8fd09259 100644 --- a/t/rope.vim +++ b/t/rope.vim @@ -1,6 +1,7 @@ let g:pymode_rope_completion_bind = 'X' let g:pymode_rope_autoimport = 0 let g:pymode_debug = 1 +let g:pymode_rope_lookup_project = 0 source plugin/pymode.vim @@ -16,10 +17,12 @@ describe 'pymode-plugin' end it 'pymode rope auto open project in current working directory' - let project_path = '.ropeproject' + let project_path = getcwd() . '/.ropeproject' Expect isdirectory(project_path) == 0 normal oimporX Expect getline('.') == 'import' + Expect g:pymode_rope_current == getcwd() . '/' + Expect g:pymode_rope_current . 
'.ropeproject' == project_path Expect isdirectory(project_path) == 1 end From 419043707ca08a1ed614f5ae8c49c114acab3b7d Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 4 Dec 2013 13:01:34 +0700 Subject: [PATCH 038/428] Update Makefile --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 855a46ca..1c58bcbe 100644 --- a/Makefile +++ b/Makefile @@ -45,6 +45,7 @@ PACKAGE_MAINTAINER="Kirill Klenov " PACKAGE_URL=http://github.com/klen/python-mode deb: clean $(CURDIR)/build @git co gh-pages + @rm -rf deb @fpm -s dir -t deb -a all \ -n $(PACKAGE_NAME) \ -v $(PACKAGE_VERSION) \ From f859a614fe67da5a73302789f7563a3fe9b45b7d Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 4 Dec 2013 13:03:18 +0700 Subject: [PATCH 039/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 4db0abb0..d897f62a 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-02 0.7.5b +## 2013-12-04 0.7.6b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index 7719eadf..a364e308 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.5b + Version: 0.7.6b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 7eb98a82..d085e415 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.5b" +let g:pymode_version = "0.7.6b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! 
PymodeTroubleshooting call pymode#troubleshooting#test() From 147846c73ca31130f0ed896e6986c0e755c5440a Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 4 Dec 2013 15:32:48 +0700 Subject: [PATCH 040/428] Fix lint --- pymode/lint.py | 6 +++++- t/lint.vim | 12 ++++++++++++ t/test.py | 8 ++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 t/test.py diff --git a/pymode/lint.py b/pymode/lint.py index bd75c817..eaf62470 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -16,6 +16,10 @@ def code_check(): from pylama.main import parse_options from pylama.tasks import check_path + if not env.curbuf.name: + env.stop() + return False + options = parse_options( ignore=env.var('g:pymode_lint_ignore'), select=env.var('g:pymode_lint_select'), @@ -36,7 +40,7 @@ def code_check(): LOGGER.setLevel(logging.DEBUG) with silence_stderr(): - errors = check_path(path, options=options, code=env.source) + errors = check_path(path, options=options, code='\n'.join(env.curbuf)) env.debug("Find errors: ", len(errors)) sort_rules = env.var('g:pymode_lint_sort') diff --git a/t/lint.vim b/t/lint.vim index a9473675..14de6e54 100644 --- a/t/lint.vim +++ b/t/lint.vim @@ -14,5 +14,17 @@ describe 'pymode check code' Expect g:pymode_lint == 1 end + it 'lint code' + put =['# coding: utf-8', 'call_unknown_function()'] + PymodeLint + Expect getloclist(0) == [] + end + + it 'lint code' + e t/test.py + PymodeLint + Expect getloclist(0) == [{'lnum': 6, 'bufnr': 1, 'col': 0, 'valid': 1, 'vcol': 0, 'nr': 0, 'type': 'E', 'pattern': '', 'text': 'W0612 local variable "unused" is assigned to but never used [pyflakes]'}, {'lnum': 8, 'bufnr': 1, 'col': 0, 'valid': 1, 'vcol': 0, 'nr': 0, 'type': 'E', 'pattern': '', 'text': 'E0602 undefined name "unknown" [pyflakes]'}] + end + end diff --git a/t/test.py b/t/test.py new file mode 100644 index 00000000..ba7d5efb --- /dev/null +++ b/t/test.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# coding: utf-8 + + +def main(): + unused = 1 + +unknown() 
From c6aef54ae173ecde8fb5e740d9fb3abe2607189e Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 4 Dec 2013 15:33:38 +0700 Subject: [PATCH 041/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index d897f62a..84e774ad 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-04 0.7.6b +## 2013-12-04 0.7.7b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index a364e308..c232ddb6 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.6b + Version: 0.7.7b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index d085e415..a28b6ac1 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.6b" +let g:pymode_version = "0.7.7b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! PymodeTroubleshooting call pymode#troubleshooting#test() From ed8c533c6207f3a70e1b95b1bc2a2d039a813bc4 Mon Sep 17 00:00:00 2001 From: Mel Boyce Date: Thu, 5 Dec 2013 11:54:52 +1100 Subject: [PATCH 042/428] Added default value for err variable. --- pymode/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/run.py b/pymode/run.py index 3ad61117..4a39f530 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -13,7 +13,7 @@ def run_code(): """ Run python code in current buffer. 
""" - errors = [] + errors, err = [], '' line1, line2 = env.var('a:line1'), env.var('a:line2') lines = __prepare_lines(line1, line2) From e2599865200bca23593828c6687f45fbeda19773 Mon Sep 17 00:00:00 2001 From: Mel Boyce Date: Thu, 5 Dec 2013 12:18:50 +1100 Subject: [PATCH 043/428] Cleaned up some grammar. --- doc/pymode.txt | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index c232ddb6..e1c75036 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -312,7 +312,7 @@ after them 'C' and ... > let g:pymode_lint_sort = [] -Auto open cwindow (quickfix) if any errors has been finded +Auto open cwindow (quickfix) if any errors have been found *'g:pymode_lint_cwindow'* > let g:pymode_lint_cwindow = 1 @@ -335,7 +335,7 @@ Definitions for |signs| 3. Rope support ~ *pymode-rope* -Pymode support Rope refactoring operations, code completion and code assists. +Pymode supports Rope refactoring operations, code completion and code assists. Commands: |:PymodeRopeAutoImport| -- Resolve import for element under cursor @@ -397,8 +397,8 @@ Regenerate project cache on every save (if file has been modified) 4.1 Completion ~ *pymode-completion* -By default you can use for autocompletion. Will be -automatically selected first entry and you can press to insert in +By default you can use for autocompletion. The first entry will +be automatically selected and you can press to insert the entry in your code. and / works too. Autocompletion is also called by typing a period in |Insert| mode by default. @@ -552,8 +552,9 @@ Turn on pymode syntax *'g:pymode_syntax'* > let g:pymode_syntax = 1 -More slow synchronizing. Disable on the slow machine, but code in docstrings -could be broken. *'g:pymode_syntax_slow_sync'* +Slower syntax synchronization that is better at handling code blocks in +docstrings. Consider disabling this on slower hardware. 
+ *'g:pymode_syntax_slow_sync'* > let g:pymode_syntax_slow_sync = 1 From ce879756b2d0faecae344608d77ae8186f11d537 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 5 Dec 2013 12:07:08 +0700 Subject: [PATCH 044/428] Fix encoding bug --- AUTHORS | 1 + Makefile | 1 - pymode/environment.py | 12 +++++++++--- pymode/rope.py | 7 +++++-- t/rope.vim | 5 +++++ 5 files changed, 20 insertions(+), 6 deletions(-) diff --git a/AUTHORS b/AUTHORS index f4c9dc68..39ab76ed 100644 --- a/AUTHORS +++ b/AUTHORS @@ -23,6 +23,7 @@ Contributors: * Lowe Thiderman (thiderman); * Martin Brochhaus (mbrochh); * Matthew Moses (mlmoses); +* Mel Boyce (syngin) * Mohammed (mbadran); * Naoya Inada (naoina); * Pedro Algarvio (s0undt3ch); diff --git a/Makefile b/Makefile index 1c58bcbe..68ff3dbb 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,6 @@ clean: # Temporary disable rope tests on Travis .PHONY: travis travis: - rm -rf t/rope.vim rake test .PHONY: test diff --git a/pymode/environment.py b/pymode/environment.py index 98d6a530..43d343be 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -77,7 +77,11 @@ def var(self, name, to_bool=False): return value def message(self, msg, history=False): - """ Show message to user. """ + """ Show message to user. + + :return: :None + + """ if history: return vim.command('echom "%s"' % str(msg)) @@ -188,14 +192,16 @@ def let(self, name, value): self.debug(cmd) vim.command(cmd) - def prepare_value(self, value): + def prepare_value(self, value, dumps=True): """ Decode bstr to vim encoding. 
:return unicode string: """ - value = json.dumps(value) + if dumps: + value = json.dumps(value) + if PY2: value = value.decode('utf-8').encode(self.options.get('encoding')) diff --git a/pymode/rope.py b/pymode/rope.py index dbf24309..3d44cd4b 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -85,7 +85,9 @@ def complete(dot=False): s_offset = codeassist.starting_offset(source, offset) p_prefix = prefix[offset - s_offset:] line = env.lines[row - 1] - env.curbuf[row - 1] = line[:col] + p_prefix + line[col:] # noqa + cline = line[:col] + p_prefix + line[col:] + if cline != line: + env.curbuf[row - 1] = env.prepare_value(cline, dumps=False) env.current.window.cursor = (row, col + len(p_prefix)) env.run('complete', col - len(prefix) + len(p_prefix) + 1, proposals) return True @@ -887,7 +889,8 @@ def _insert_import(name, module, ctx): source, _ = env.get_offset_params() lineno = ctx.importer.find_insertion_line(source) line = 'from %s import %s' % (module, name) - env.curbuf[lineno - 1:lineno - 1] = [line] + env.curbuf[lineno - 1:lineno - 1] = [ + env.prepare_value(line, dumps=False)] return True pyobject = ctx.project.pycore.resource_to_pyobject(ctx.resource) diff --git a/t/rope.vim b/t/rope.vim index 8fd09259..fa8154bd 100644 --- a/t/rope.vim +++ b/t/rope.vim @@ -17,6 +17,11 @@ describe 'pymode-plugin' end it 'pymode rope auto open project in current working directory' + + if $TRAVIS != "" + SKIP 'Travis fails on this test' + endif + let project_path = getcwd() . 
'/.ropeproject' Expect isdirectory(project_path) == 0 normal oimporX From 872544276aceda6e920813595326d5f7140632fa Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 5 Dec 2013 12:07:44 +0700 Subject: [PATCH 045/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 84e774ad..0c405c44 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-04 0.7.7b +## 2013-12-04 0.7.8b -------------------- * Update indentation support; * Python3 support; diff --git a/doc/pymode.txt b/doc/pymode.txt index e1c75036..a6e26a93 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.7b + Version: 0.7.8b ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index a28b6ac1..d7646c9d 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.7b" +let g:pymode_version = "0.7.8b" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! PymodeTroubleshooting call pymode#troubleshooting#test() From ea311a447cdf4f0ad95141862e3a38bc028c1d43 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 5 Dec 2013 12:40:41 +0700 Subject: [PATCH 046/428] Fix dot autocomplete --- pymode/environment.py | 2 +- pymode/rope.py | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/pymode/environment.py b/pymode/environment.py index 43d343be..247d5700 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -158,7 +158,7 @@ def stop(self, value=None): """ Break Vim function. 
""" cmd = 'return' - if value: + if not value is None: cmd += ' ' + self.prepare_value(value) vim.command(cmd) diff --git a/pymode/rope.py b/pymode/rope.py index 3d44cd4b..edb91ea9 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -61,6 +61,9 @@ def completions(): return env.stop(proposals) +FROM_RE = re.compile(r'^from\s+[\.\w\d_]+$') + + @env.catch_exceptions def complete(dot=False): """ Ctrl+Space completion. @@ -70,6 +73,12 @@ def complete(dot=False): """ row, col = env.cursor source, offset = env.get_offset_params() + + cline = env.current.line[:col] + env.debug('dot completion', cline) + if FROM_RE.match(cline) or cline.endswith('..'): + return env.stop("") + proposals = get_proporsals(source, offset, dot=dot) if not proposals: return False From ffe926ff07176bedcb5a9ce8d0f32644f5adc62c Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 5 Dec 2013 16:31:56 +0700 Subject: [PATCH 047/428] Disable warning in python3 --- pymode/utils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pymode/utils.py b/pymode/utils.py index 281e0152..43003c17 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -2,6 +2,7 @@ import os.path import sys import threading +import warnings from contextlib import contextmanager import vim # noqa @@ -16,6 +17,8 @@ DEBUG = int(vim.eval('g:pymode_debug')) PY2 = sys.version_info[0] == 2 +warnings.filterwarnings('ignore') + @contextmanager def silence_stderr(): From 7c981d25b10535eeacc81789af82a8fffdbc5df7 Mon Sep 17 00:00:00 2001 From: Erik Shilts Date: Thu, 5 Dec 2013 11:56:22 -0800 Subject: [PATCH 048/428] Update pymode.txt Fixed typos and added recommendation to not add .ropeproject folder to VCS. 
--- doc/pymode.txt | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index a6e26a93..3fa513ce 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -122,7 +122,7 @@ Setup pymode |quickfix| window *pymode-python-version* By default pymode looks for current python version supported in your Vim. -You could choose prefer version, but value will be testsed on loading. +You could choose prefer version, but value will be tested on loading. *'g:pymode_python'* > @@ -138,8 +138,8 @@ Set value to `python3` if you are working with python3 projects. You could use 2.2 Python indentation ~ *pymode-indent* -Pymode support PEP8-compatible python indent. -Enable pymode indentatation *'g:pymode_indent'* +Pymode supports PEP8-compatible python indent. +Enable pymode indentation *'g:pymode_indent'* > let g:pymode_indent = [] @@ -184,7 +184,7 @@ Enable pymode-motion *'g:pymode_motion'* 2.5 Show documentation ~ *pymode-documentation* -Pymode could show documetation for current word by `pydoc`. +Pymode could show documentation for current word by `pydoc`. Commands: *:PymodeDoc* — show documentation @@ -210,7 +210,7 @@ Enable automatic virtualenv detection *'g:pymode_virtualenv' > let g:pymode_virtualenv = 1 -Set path to virtualenv by manually *'g:pymode_virtualenv_path'* +Set path to virtualenv manually *'g:pymode_virtualenv_path'* > let g:pymode_virtualenv_path = $VIRTUAL_ENV @@ -233,8 +233,8 @@ Binds keys to run python code *'g:pymode_run_bind'* 2.8 Breakpoints ~ *pymode-breakpoints* -Pymode automatically detect available debugger (like pdb, ipdb, pudb) and user -could set/unset breakpoint with one key and without code checking and etc. +Pymode automatically detects available debugger (like pdb, ipdb, pudb) and user +can set/unset breakpoint with one key and without code checking and etc. 
Enable functionality *'g:pymode_breakpoint'* > @@ -254,7 +254,7 @@ Manually set breakpoint command (leave empty for automatic detection) *pymode-lint* Pymode supports `pylint`, `pep257`, `pep8`, `pyflakes`, `mccabe` code -checkers. You could run several checkers are simular. +checkers. You could run several similar checkers. Pymode uses Pylama library for code checking. Many options like skip files, errors and etc could be defined in `pylama.ini` file or modelines. @@ -351,14 +351,15 @@ Turn on the rope script *'g:pymode_rope'* > let g:pymode_rope = 1 -.roperoject Folder ~ +.ropeproject Folder ~ *.ropeproject* *:PymodeRopeNewProject* -- Open new Rope project in current working directory *:PymodeRopeRegenerate* -- Regenerate the project cache Rope uses a folder inside projects for holding project configuration and data. -Its default name is `.ropeproject`. +Its default name is `.ropeproject`. It is recommended that you do not add the +.ropeproject folder to version control system. Currently it is used for things such as: From 1361c99484b7757a352b16b8c56f5485bc01b866 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 6 Dec 2013 17:10:13 +0700 Subject: [PATCH 049/428] Update deb build. --- Makefile | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/Makefile b/Makefile index 68ff3dbb..f343ec77 100644 --- a/Makefile +++ b/Makefile @@ -37,14 +37,11 @@ $(CURDIR)/build: cp -r after autoload doc ftplugin plugin pymode syntax $(CURDIR)/build/usr/share/vim/addons/. cp -r python-mode.yaml $(CURDIR)/build/usr/share/vim/registry/. 
-TARGET?=$(CURDIR)/deb PACKAGE_VERSION?=$(shell git describe --tags `git rev-list master --tags --max-count=1`) PACKAGE_NAME="vim-python-mode" PACKAGE_MAINTAINER="Kirill Klenov " PACKAGE_URL=http://github.com/klen/python-mode deb: clean $(CURDIR)/build - @git co gh-pages - @rm -rf deb @fpm -s dir -t deb -a all \ -n $(PACKAGE_NAME) \ -v $(PACKAGE_VERSION) \ @@ -58,10 +55,4 @@ deb: clean $(CURDIR)/build -d "python2.7" \ -d "vim-addon-manager" \ usr - @mkdir -p $(TARGET) - @prm --type deb --path $(TARGET) \ - --release precise,quantal,raring,saucy \ - --arch amd64,i386,all \ - --component main \ - --directory $(CURDIR) \ - --gpg horneds@gmail.com + @mv *.deb ~/Dropbox/projects/deb/load From 6bdd7b4631ee124fc3086b3aca27c10bcb6916d3 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 6 Dec 2013 17:10:34 +0700 Subject: [PATCH 050/428] Clean on exit. --- autoload/pymode.vim | 6 ++++++ ftplugin/python/pymode.vim | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index bc325391..c518c415 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -130,3 +130,9 @@ fun! pymode#debug(msg) "{{{ echom string(g:pymode_debug) . ': ' . string(a:msg) endif endfunction "}}} + +fun! pymode#quit() "{{{ + augroup pymode + au! + augroup END +endfunction "}}} diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index 2492f44b..2b3a0834 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -67,6 +67,12 @@ if g:pymode_lint command! -buffer -nargs=0 PymodeLintToggle :call pymode#lint#toggle() command! -buffer -nargs=0 PymodeLint :call pymode#lint#check() + if v:version > 703 || (v:version == 703 && has('patch544')) + au! QuitPre call pymode#quit() + else + au! pymode BufWinLeave * silent! 
lclose + endif + let b:pymode_error_line = -1 if g:pymode_lint_on_fly From c3751c92011c13a259bffad37255f13268d56e33 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 6 Dec 2013 17:10:50 +0700 Subject: [PATCH 051/428] Skip lint for autopep8. --- pylama.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/pylama.ini b/pylama.ini index 8ccac54e..d1651077 100644 --- a/pylama.ini +++ b/pylama.ini @@ -1,2 +1,3 @@ [main] ignore = R0201,R0922,C0111,E1103 +skip = pymode/autopep8.py From b7eb0ecef4ee023bf43dc8df5072601204ec1832 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 6 Dec 2013 17:11:13 +0700 Subject: [PATCH 052/428] Fix complete on dot. --- pymode/rope.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index edb91ea9..146a7fdf 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -61,7 +61,7 @@ def completions(): return env.stop(proposals) -FROM_RE = re.compile(r'^from\s+[\.\w\d_]+$') +FROM_RE = re.compile(r'^\s*from\s+[\.\w\d_]+$') @env.catch_exceptions @@ -76,7 +76,7 @@ def complete(dot=False): cline = env.current.line[:col] env.debug('dot completion', cline) - if FROM_RE.match(cline) or cline.endswith('..'): + if FROM_RE.match(cline) or cline.endswith('..') or cline.endswith('\.'): return env.stop("") proposals = get_proporsals(source, offset, dot=dot) From 757b663d2ecb7ecc8cf6717e29d7c7a3baef0e92 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 6 Dec 2013 17:12:16 +0700 Subject: [PATCH 053/428] fix folding. --- autoload/pymode/folding.vim | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 4021297d..9865f5ed 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -17,7 +17,7 @@ fun! 
pymode#folding#text() " {{{ let line = getline(fs) let nucolwidth = &fdc + &number * &numberwidth - let windowwidth = winwidth(0) - nucolwidth - 3 + let windowwidth = winwidth(0) - nucolwidth - 6 let foldedlinecount = v:foldend - v:foldstart " expand tabs into spaces @@ -27,7 +27,7 @@ fun! pymode#folding#text() " {{{ let line = strpart(line, 0, windowwidth - 2 -len(foldedlinecount)) let line = substitute(line, '\%("""\|''''''\)', '', '') let fillcharcount = windowwidth - len(line) - len(foldedlinecount) - return line . '…' . repeat(" ",fillcharcount) . foldedlinecount . '…' . ' ' + return line . '…' . repeat(" ", fillcharcount) . ' ' . foldedlinecount . ' ' endfunction "}}} From 6924c759084d289d9406ef295f17f423a5592ab0 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 6 Dec 2013 18:21:09 +0700 Subject: [PATCH 054/428] Update pylama --- pymode/libs/pylama/core.py | 3 ++- pymode/libs/pylama/lint/pylama_pep257/__init__.py | 5 ++--- pymode/libs/pylama/lint/pylama_pep8/__init__.py | 11 +++++++++-- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 711fd20c..4ca2849e 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -17,6 +17,7 @@ # Setup a logger LOGGER = logging.getLogger('pylama') +LOGGER.propagate = False STREAM = logging.StreamHandler(sys.stdout) LOGGER.addHandler(STREAM) @@ -173,7 +174,7 @@ def __init__(self, code, path): def __enter__(self): if self.code is None: self._file = open(self.path, 'rU') - self.code = self._file.read() + '\n\n' + self.code = self._file.read() return self def __exit__(self, t, value, traceback): diff --git a/pymode/libs/pylama/lint/pylama_pep257/__init__.py b/pymode/libs/pylama/lint/pylama_pep257/__init__.py index 4a1e6b39..1deef476 100644 --- a/pymode/libs/pylama/lint/pylama_pep257/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pep257/__init__.py @@ -8,17 +8,16 @@ class Linter(BaseLinter): """ Mccabe code complexity. 
""" @staticmethod - def run(path, **meta): + def run(path, code=None, **meta): """ PEP257 code checking. :return list: List of errors. """ - f = open(path) from .pep257 import check_source errors = [] - for er in check_source(f.read(), path): + for er in check_source(code, path): errors.append(dict( lnum=er.line, col=er.char, diff --git a/pymode/libs/pylama/lint/pylama_pep8/__init__.py b/pymode/libs/pylama/lint/pylama_pep8/__init__.py index a1dadc43..724fe5e8 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pep8/__init__.py @@ -2,20 +2,27 @@ from .. import Linter as BaseLinter from .pep8 import BaseReport, StyleGuide +try: + from StringIO import StringIO +except ImportError: + from io import StringIO + + class Linter(BaseLinter): """ PEP8 code check. """ @staticmethod - def run(path, **meta): + def run(path, code=None, **meta): """ PEP8 code checking. :return list: List of errors. """ P8Style = StyleGuide(reporter=_PEP8Report) - return P8Style.input_file(path) + buf = StringIO(code) + return P8Style.input_file(path, lines=buf.readlines()) class _PEP8Report(BaseReport): From d7cf768f0f35166e356c2c3d53c8d87866d09bb2 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 6 Dec 2013 21:19:25 +0700 Subject: [PATCH 055/428] Update show documentation functioonality (c) blueyed --- autoload/pymode/doc.vim | 35 ++++++++++++++++++++++++++++------- ftplugin/python/pymode.vim | 2 +- pymode/__init__.py | 1 + 3 files changed, 30 insertions(+), 8 deletions(-) diff --git a/autoload/pymode/doc.vim b/autoload/pymode/doc.vim index ba84503f..3030dcea 100644 --- a/autoload/pymode/doc.vim +++ b/autoload/pymode/doc.vim @@ -2,16 +2,37 @@ " PymodePython import pymode + +fun! pymode#doc#find() "{{{ + " Extract the 'word' at the cursor, expanding leftwards across identifiers + " and the . operator, and rightwards across the identifier only. + " + " For example: + " import xml.dom.minidom + " ^ ! 
+ " + " With the cursor at ^ this returns 'xml'; at ! it returns 'xml.dom'. + let l:line = getline(".") + let l:pre = l:line[:col(".") - 1] + let l:suf = l:line[col("."):] + let word = matchstr(pre, "[A-Za-z0-9_.]*$") . matchstr(suf, "^[A-Za-z0-9_]*") + call pymode#doc#show(word) +endfunction "}}} + + + fun! pymode#doc#show(word) "{{{ if a:word == '' call pymode#error("No name/symbol under cursor!") - else - call pymode#tempbuffer_open('__doc__') - PymodePython pymode.get_documentation() - setlocal nomodifiable - setlocal nomodified - setlocal filetype=rst - wincmd p + return 0 endif + + call pymode#tempbuffer_open('__doc__') + PymodePython pymode.get_documentation() + setlocal nomodifiable + setlocal nomodified + setlocal filetype=rst + wincmd p + endfunction "}}} diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index 2b3a0834..2c162e7b 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -100,7 +100,7 @@ if g:pymode_doc command! -buffer -nargs=1 PymodeDoc call pymode#doc#show("") " Set keys - exe "nnoremap " g:pymode_doc_bind ":call pymode#doc#show(expand(''))" + exe "nnoremap " g:pymode_doc_bind ":call pymode#doc#find()" exe "vnoremap " g:pymode_doc_bind ":call pymode#doc#show(@*)" end diff --git a/pymode/__init__.py b/pymode/__init__.py index 75528a03..e647147b 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -35,3 +35,4 @@ def get_documentation(): help(vim.eval('a:word')) sys.stdout, out = _, sys.stdout.getvalue() vim.current.buffer.append(str(out).splitlines(), 0) + From 24cf7acd67d15af5c3182efc9f30fe5cbadc336f Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 7 Dec 2013 00:26:41 +0700 Subject: [PATCH 056/428] Update pymode --- pymode/libs/pylama/config.py | 32 +- pymode/libs/pylama/core.py | 27 +- .../pylama/lint/pylama_mccabe/__init__.py | 2 +- .../libs/pylama/lint/pylama_pep8/__init__.py | 4 +- .../pylama/lint/pylama_pyflakes/__init__.py | 5 +- .../pylama/lint/pylama_pylint/__init__.py | 13 +- 
.../{pylint => }/astroid/__init__.py | 34 +- .../{pylint => }/astroid/__pkginfo__.py | 2 +- .../{pylint => }/astroid/as_string.py | 0 .../{pylint => }/astroid/bases.py | 6 +- .../lint/pylama_pylint/astroid/brain/py2gi.py | 147 ++++++ .../astroid/brain/py2mechanize.py | 20 + .../pylama_pylint/astroid/brain/py2qt4.py | 25 ++ .../pylama_pylint/astroid/brain/py2stdlib.py | 227 ++++++++++ .../{pylint => }/astroid/builder.py | 12 +- .../{pylint => }/astroid/exceptions.py | 0 .../{pylint => }/astroid/inference.py | 10 +- .../{pylint => }/astroid/manager.py | 17 +- .../{pylint => }/astroid/mixins.py | 2 +- .../{pylint => }/astroid/node_classes.py | 6 +- .../{pylint => }/astroid/nodes.py | 4 +- .../{pylint => }/astroid/protocols.py | 10 +- .../{pylint => }/astroid/raw_building.py | 10 +- .../{pylint => }/astroid/rebuilder.py | 2 +- .../{pylint => }/astroid/scoped_nodes.py | 18 +- .../{pylint => }/astroid/utils.py | 8 +- .../{pylint => }/logilab/__init__.py | 0 .../{pylint => }/logilab/common/__init__.py | 2 +- .../logilab/common/__pkginfo__.py | 11 +- .../{pylint => }/logilab/common/changelog.py | 2 +- .../{pylint => }/logilab/common/compat.py | 2 +- .../logilab/common/configuration.py | 137 +++--- .../{pylint => }/logilab/common/decorators.py | 16 +- .../logilab/common/deprecation.py | 188 ++++++++ .../{pylint => }/logilab/common/graph.py | 15 +- .../{pylint => }/logilab/common/interface.py | 0 .../{pylint => }/logilab/common/modutils.py | 73 ++- .../{pylint => }/logilab/common/optik_ext.py | 14 +- .../{pylint => }/logilab/common/textutils.py | 2 +- .../{pylint => }/logilab/common/tree.py | 4 +- .../logilab/common/ureports/__init__.py | 8 +- .../logilab/common/ureports/docbook_writer.py | 0 .../logilab/common/ureports/html_writer.py | 2 +- .../logilab/common/ureports/nodes.py | 2 +- .../logilab/common/ureports/text_writer.py | 4 +- .../{pylint => }/logilab/common/visitor.py | 0 pymode/libs/pylama/lint/pylama_pylint/main.py | 10 +- 
.../lint/pylama_pylint/pylint/__pkginfo__.py | 24 +- .../pylama_pylint/pylint/checkers/__init__.py | 8 +- .../pylama_pylint/pylint/checkers/base.py | 22 +- .../pylama_pylint/pylint/checkers/classes.py | 63 ++- .../pylint/checkers/design_analysis.py | 8 +- .../pylint/checkers/exceptions.py | 30 +- .../pylama_pylint/pylint/checkers/format.py | 424 +++++++++++++----- .../pylama_pylint/pylint/checkers/imports.py | 18 +- .../pylama_pylint/pylint/checkers/logging.py | 12 +- .../pylama_pylint/pylint/checkers/misc.py | 4 +- .../pylama_pylint/pylint/checkers/newstyle.py | 8 +- .../pylint/checkers/raw_metrics.py | 10 +- .../pylama_pylint/pylint/checkers/similar.py | 6 +- .../pylama_pylint/pylint/checkers/stdlib.py | 7 +- .../pylama_pylint/pylint/checkers/strings.py | 12 +- .../pylint/checkers/typecheck.py | 10 +- .../pylama_pylint/pylint/checkers/utils.py | 18 +- .../pylint/checkers/variables.py | 84 +++- .../lint/pylama_pylint/pylint/config.py | 11 +- .../lint/pylama_pylint/pylint/interfaces.py | 2 +- .../pylama/lint/pylama_pylint/pylint/lint.py | 66 ++- .../pylint/logilab/common/deprecation.py | 130 ------ .../pylint/reporters/__init__.py | 6 +- .../pylint/reporters/guireporter.py | 16 +- .../pylama_pylint/pylint/reporters/html.py | 7 +- .../pylama_pylint/pylint/reporters/text.py | 10 +- .../pylama/lint/pylama_pylint/pylint/utils.py | 239 +++++----- pymode/libs/pylama/main.py | 3 - pymode/libs/pylama/tasks.py | 13 +- pymode/libs/pylama/utils.py | 184 -------- pymode/lint.py | 3 +- 78 files changed, 1666 insertions(+), 927 deletions(-) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/__init__.py (83%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/__pkginfo__.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/as_string.py (100%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/bases.py (99%) create mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py create mode 100644 
pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2mechanize.py create mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2qt4.py create mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2stdlib.py rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/builder.py (96%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/exceptions.py (100%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/inference.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/manager.py (96%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/mixins.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/node_classes.py (99%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/nodes.py (94%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/protocols.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/raw_building.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/rebuilder.py (99%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/scoped_nodes.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/astroid/utils.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/__init__.py (100%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/__init__.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/__pkginfo__.py (87%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/changelog.py (99%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/compat.py (99%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/configuration.py (88%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/decorators.py (95%) create mode 100644 pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py rename 
pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/graph.py (95%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/interface.py (100%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/modutils.py (90%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/optik_ext.py (97%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/textutils.py (99%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/tree.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/ureports/__init__.py (96%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/ureports/docbook_writer.py (100%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/ureports/html_writer.py (99%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/ureports/nodes.py (99%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/ureports/text_writer.py (98%) rename pymode/libs/pylama/lint/pylama_pylint/{pylint => }/logilab/common/visitor.py (100%) delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py delete mode 100644 pymode/libs/pylama/utils.py diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index 408fdcb2..c7c4eb15 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -1,4 +1,5 @@ """ Parse arguments from command line and configuration files. 
""" + import fnmatch from os import getcwd, path from re import compile as re @@ -15,16 +16,13 @@ #: A default checkers DEFAULT_LINTERS = 'pep8', 'pyflakes', 'mccabe' -#: A default complexity for mccabe checker -DEFAULT_COMPLEXITY = 10 - CURDIR = getcwd() DEFAULT_INI_PATH = path.join(CURDIR, 'pylama.ini') def parse_options( args=None, async=False, select='', ignore='', linters=DEFAULT_LINTERS, - complexity=DEFAULT_COMPLEXITY, options=DEFAULT_INI_PATH): + options=DEFAULT_INI_PATH): """ Parse options from command line and configuration files. :return argparse.Namespace: @@ -37,10 +35,9 @@ def parse_options( async=_Default(async), format=_Default('pep8'), select=_Default(select), ignore=_Default(ignore), report=_Default(None), verbose=_Default(False), - linters=_Default(','.join(linters)), complexity=_Default(complexity), - options=_Default(options)) + linters=_Default(','.join(linters)), options=_Default(options)) - if not (args is None): + if not args is None: options = parser.parse_args(args) # Parse options from ini file @@ -72,13 +69,18 @@ def parse_options( # Parse file related options options.file_params = dict() + options.linter_params = dict() for k, s in config.sections.items(): - if k != config.default_section: - mask = re(fnmatch.translate(k)) - options.file_params[mask] = dict(s) - options.file_params[mask]['lint'] = int( - options.file_params[mask].get('lint', 1) - ) + if k == config.default_section: + continue + if k in LINTERS: + options.linter_params[k] = dict(s) + continue + mask = re(fnmatch.translate(k)) + options.file_params[mask] = dict(s) + options.file_params[mask]['lint'] = int( + options.file_params[mask].get('lint', 1) + ) return options @@ -147,10 +149,6 @@ def parse_linters(csp_str): type=lambda s: [re(fnmatch.translate(p)) for p in s.split(',') if p], help="Skip files by masks (comma-separated, Ex. 
*/messages.py)") - parser.add_argument( - "--complexity", "-c", default=_Default(DEFAULT_COMPLEXITY), type=int, - help="Set mccabe complexity.") - parser.add_argument("--report", "-r", help="Filename for report.") parser.add_argument( "--hook", action="store_true", help="Install Git (Mercurial) hook.") diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 4ca2849e..93890bc8 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -6,8 +6,10 @@ import logging import re import sys + from .lint.extensions import LINTERS + #: The skip pattern SKIP_PATTERN = re.compile(r'# *noqa\b', re.I).search @@ -22,28 +24,30 @@ LOGGER.addHandler(STREAM) -def run( - path, ignore=None, select=None, linters=None, config=None, code=None, - **meta): +def run(path, code=None, options=None): """ Run a code checkers with given params. :return errors: list of dictionaries with error's information """ errors = [] - linters = linters or LINTERS.items() - params = dict(ignore=ignore, select=select) + params = dict(ignore=options.ignore, select=options.select) + config = dict() + for mask in options.file_params: + if mask.match(path): + config.update(options.file_params[mask]) + try: with CodeContext(code, path) as ctx: code = ctx.code params = prepare_params( - parse_modeline(code), config, ignore=ignore, select=select - ) + parse_modeline(code), config, ignore=options.ignore, + select=options.select) if not params['lint']: return errors - for item in linters: + for item in options.linters: if not isinstance(item, tuple): item = (item, LINTERS.get(item)) @@ -53,14 +57,15 @@ def run( if not linter or not linter.allow(path): continue + meta = options.linter_params.get(name, dict()) result = linter.run(path, code=code, **meta) for e in result: + e['linter'] = name e['col'] = e.get('col') or 0 e['lnum'] = e.get('lnum') or 0 e['type'] = e.get('type') or 'E' - e['text'] = "{0} [{1}]".format((e.get( - 'text') or '').strip() - .replace("'", "\"").split('\n')[0], 
name) + e['text'] = "%s [%s]" % ( + e.get('text', '').strip().split('\n')[0], name) e['filename'] = path or '' errors.append(e) diff --git a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py index 0b035c07..e371bdb4 100644 --- a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py +++ b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py @@ -8,7 +8,7 @@ class Linter(BaseLinter): """ Mccabe code complexity. """ @staticmethod - def run(path, code=None, complexity=8, **meta): + def run(path, code=None, complexity=10, **meta): """ MCCabe code checking. :return list: List of errors. diff --git a/pymode/libs/pylama/lint/pylama_pep8/__init__.py b/pymode/libs/pylama/lint/pylama_pep8/__init__.py index 724fe5e8..6948302e 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pep8/__init__.py @@ -14,13 +14,13 @@ class Linter(BaseLinter): """ PEP8 code check. """ @staticmethod - def run(path, code=None, **meta): + def run(path, code=None, **options): """ PEP8 code checking. :return list: List of errors. """ - P8Style = StyleGuide(reporter=_PEP8Report) + P8Style = StyleGuide(reporter=_PEP8Report, **options) buf = StringIO(code) return P8Style.input_file(path, lines=buf.readlines()) diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py index c136b553..d2f95dee 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py @@ -8,7 +8,7 @@ class Linter(BaseLinter): """ Pyflakes code check. """ @staticmethod - def run(path, code=None, **meta): + def run(path, code=None, builtins=None, **meta): """ Pyflake code checking. :return list: List of errors. 
@@ -16,6 +16,9 @@ def run(path, code=None, **meta): """ import _ast from .pyflakes import checker + import os + + os.environ.setdefault('PYFLAKES_BUILTINS', builtins) errors = [] tree = compile(code, path, "exec", _ast.PyCF_ONLY_AST) diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py index ce270f54..56bdb3ae 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -9,7 +9,12 @@ __author__ = "horneds " __license__ = "BSD" -try: - from .main import Linter -except ImportError: - Linter = None +import os.path +import sys + +CURDIR = os.path.abspath(os.path.dirname(__file__)) +sys.path.insert(0, CURDIR) + +from .main import Linter +assert Linter + diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py similarity index 83% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py index af17875d..19c80902 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py @@ -48,23 +48,23 @@ # WARNING: internal imports order matters ! # make all exception classes accessible from astroid package -from .exceptions import * +from astroid.exceptions import * # make all node classes accessible from astroid package -from .nodes import * +from astroid.nodes import * # trigger extra monkey-patching -from . import inference +from astroid import inference # more stuff available -from . 
import raw_building -from .bases import YES, Instance, BoundMethod, UnboundMethod -from .node_classes import are_exclusive, unpack_infer -from .scoped_nodes import builtin_lookup +from astroid import raw_building +from astroid.bases import YES, Instance, BoundMethod, UnboundMethod +from astroid.node_classes import are_exclusive, unpack_infer +from astroid.scoped_nodes import builtin_lookup # make a manager instance (borg) as well as Project and Package classes # accessible from astroid package -from .manager import AstroidManager, Project +from astroid.manager import AstroidManager, Project MANAGER = AstroidManager() del AstroidManager @@ -106,13 +106,13 @@ def transform(node, infer_function=infer_function): return transform # load brain plugins -# from os import listdir -# from os.path import join, dirname -# BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') -# if BRAIN_MODULES_DIR not in sys.path: - # # add it to the end of the list so user path take precedence - # sys.path.append(BRAIN_MODULES_DIR) +from os import listdir +from os.path import join, dirname +BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') +if BRAIN_MODULES_DIR not in sys.path: + # add it to the end of the list so user path take precedence + sys.path.append(BRAIN_MODULES_DIR) # load modules in this directory -# for module in listdir(BRAIN_MODULES_DIR): - # if module.endswith('.py'): - # __import__(module[:-3]) +for module in listdir(BRAIN_MODULES_DIR): + if module.endswith('.py'): + __import__(module[:-3]) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py similarity index 98% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py rename to pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py index a74b6b69..1e59829b 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/__pkginfo__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py @@ 
-21,7 +21,7 @@ modname = 'astroid' -numversion = (1, 0, 0) +numversion = (1, 0, 1) version = '.'.join([str(num) for num in numversion]) install_requires = ['logilab-common >= 0.60.0'] diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/as_string.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/as_string.py rename to pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/bases.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py similarity index 99% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/bases.py rename to pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py index 641f88ae..d8b1b8cb 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/astroid/bases.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py @@ -24,7 +24,7 @@ import sys from contextlib import contextmanager -from .exceptions import (InferenceError, AstroidError, NotFoundError, +from astroid.exceptions import (InferenceError, AstroidError, NotFoundError, UnresolvableName, UseInferenceDefault) @@ -583,11 +583,11 @@ def eq(self, value): return False def as_string(self): - from .as_string import to_code + from astroid.as_string import to_code return to_code(self) def repr_tree(self, ids=False): - from .as_string import dump + from astroid.as_string import dump return dump(self) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py new file mode 100644 index 00000000..5001b7cb --- /dev/null +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py @@ -0,0 +1,147 @@ +"""Astroid hooks for the Python 2 GObject introspection bindings. 
+ +Helps with understanding everything imported from 'gi.repository' +""" + +import inspect +import sys + +from astroid import MANAGER, AstroidBuildingException +from astroid.builder import AstroidBuilder + + +_inspected_modules = {} + + +def _gi_build_stub(parent): + """ + Inspect the passed module recursively and build stubs for functions, + classes, etc. + """ + classes = {} + functions = {} + constants = {} + methods = {} + for name in dir(parent): + if not name or name.startswith("__"): + # GLib.IConv has a parameter named "" :/ + continue + try: + obj = getattr(parent, name) + except: + continue + + if inspect.isclass(obj): + classes[name] = obj + elif (inspect.isfunction(obj) or + inspect.isbuiltin(obj)): + functions[name] = obj + elif (inspect.ismethod(obj) or + inspect.ismethoddescriptor(obj)): + methods[name] = obj + elif type(obj) in [int, str]: + constants[name] = obj + elif (str(obj).startswith("= 0.5.1'] + install_requires.append('unittest2 >= 0.5.1') +if os.name == 'nt': + install_requires.append('colorama') classifiers = ["Topic :: Utilities", "Programming Language :: Python", diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/changelog.py similarity index 99% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/changelog.py index 47641c88..74f51241 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/changelog.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/changelog.py @@ -224,7 +224,7 @@ def format_title(self): def save(self): """write back change log""" # filetutils isn't importable in appengine, so import locally - from .fileutils import ensure_fs_mode + from logilab.common.fileutils import ensure_fs_mode ensure_fs_mode(self.file, S_IWRITE) self.write(open(self.file, 'w')) diff --git 
a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/compat.py similarity index 99% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/compat.py index c8a628a9..8983ece9 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/compat.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/compat.py @@ -90,7 +90,7 @@ def method_type(callable, instance, klass): except ImportError: import pickle -from .deprecation import deprecated +from logilab.common.deprecation import deprecated from itertools import izip, chain, imap if sys.version_info < (3, 0):# 2to3 will remove the imports diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/configuration.py similarity index 88% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/configuration.py index 2949f2c0..fa93a056 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/configuration.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/configuration.py @@ -1,4 +1,4 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of logilab-common. 
@@ -113,12 +113,12 @@ DuplicateSectionError from warnings import warn -from .compat import callable, raw_input, str_encode as _encode +from logilab.common.compat import callable, raw_input, str_encode as _encode +from logilab.common.deprecation import deprecated +from logilab.common.textutils import normalize_text, unquote +from logilab.common import optik_ext -from .textutils import normalize_text, unquote -from . import optik_ext as optparse - -OptionError = optparse.OptionError +OptionError = optik_ext.OptionError REQUIRED = [] @@ -136,63 +136,66 @@ def _get_encoding(encoding, stream): # validation functions ######################################################## +# validators will return the validated value or raise optparse.OptionValueError +# XXX add to documentation + def choice_validator(optdict, name, value): """validate and return a converted value for option of type 'choice' """ if not value in optdict['choices']: msg = "option %s: invalid value: %r, should be in %s" - raise optparse.OptionValueError(msg % (name, value, optdict['choices'])) + raise optik_ext.OptionValueError(msg % (name, value, optdict['choices'])) return value def multiple_choice_validator(optdict, name, value): """validate and return a converted value for option of type 'choice' """ choices = optdict['choices'] - values = optparse.check_csv(None, name, value) + values = optik_ext.check_csv(None, name, value) for value in values: if not value in choices: msg = "option %s: invalid value: %r, should be in %s" - raise optparse.OptionValueError(msg % (name, value, choices)) + raise optik_ext.OptionValueError(msg % (name, value, choices)) return values def csv_validator(optdict, name, value): """validate and return a converted value for option of type 'csv' """ - return optparse.check_csv(None, name, value) + return optik_ext.check_csv(None, name, value) def yn_validator(optdict, name, value): """validate and return a converted value for option of type 'yn' """ - return 
optparse.check_yn(None, name, value) + return optik_ext.check_yn(None, name, value) def named_validator(optdict, name, value): """validate and return a converted value for option of type 'named' """ - return optparse.check_named(None, name, value) + return optik_ext.check_named(None, name, value) def file_validator(optdict, name, value): """validate and return a filepath for option of type 'file'""" - return optparse.check_file(None, name, value) + return optik_ext.check_file(None, name, value) def color_validator(optdict, name, value): """validate and return a valid color for option of type 'color'""" - return optparse.check_color(None, name, value) + return optik_ext.check_color(None, name, value) def password_validator(optdict, name, value): """validate and return a string for option of type 'password'""" - return optparse.check_password(None, name, value) + return optik_ext.check_password(None, name, value) def date_validator(optdict, name, value): """validate and return a mx DateTime object for option of type 'date'""" - return optparse.check_date(None, name, value) + return optik_ext.check_date(None, name, value) def time_validator(optdict, name, value): """validate and return a time object for option of type 'time'""" - return optparse.check_time(None, name, value) + return optik_ext.check_time(None, name, value) def bytes_validator(optdict, name, value): """validate and return an integer for option of type 'bytes'""" - return optparse.check_bytes(None, name, value) + return optik_ext.check_bytes(None, name, value) VALIDATORS = {'string': unquote, @@ -222,14 +225,18 @@ def _call_validator(opttype, optdict, option, value): except TypeError: try: return VALIDATORS[opttype](value) - except optparse.OptionValueError: + except optik_ext.OptionValueError: raise except: - raise optparse.OptionValueError('%s value (%r) should be of type %s' % + raise optik_ext.OptionValueError('%s value (%r) should be of type %s' % (option, value, opttype)) # user input functions 
######################################################## +# user input functions will ask the user for input on stdin then validate +# the result and return the validated value or raise optparse.OptionValueError +# XXX add to documentation + def input_password(optdict, question='password:'): from getpass import getpass while True: @@ -251,7 +258,7 @@ def input_validator(optdict, question): return None try: return _call_validator(opttype, optdict, None, value) - except optparse.OptionValueError, ex: + except optik_ext.OptionValueError, ex: msg = str(ex).split(':', 1)[-1].strip() print 'bad value: %s' % msg return input_validator @@ -264,6 +271,8 @@ def input_validator(optdict, question): for opttype in VALIDATORS.keys(): INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype)) +# utility functions ############################################################ + def expand_default(self, option): """monkey patch OptionParser.expand_default since we have a particular way to handle defaults to avoid overriding values in the configuration @@ -278,15 +287,15 @@ def expand_default(self, option): value = None else: optdict = provider.get_option_def(optname) - optname = provider.option_name(optname, optdict) + optname = provider.option_attrname(optname, optdict) value = getattr(provider.config, optname, optdict) value = format_option_value(optdict, value) - if value is optparse.NO_DEFAULT or not value: + if value is optik_ext.NO_DEFAULT or not value: value = self.NO_DEFAULT_VALUE return option.help.replace(self.default_tag, str(value)) -def convert(value, optdict, name=''): +def _validate(value, optdict, name=''): """return a validated value for an option according to its type optional argument name is only used for error message formatting @@ -297,6 +306,9 @@ def convert(value, optdict, name=''): # FIXME return value return _call_validator(_type, optdict, name, value) +convert = deprecated('[0.60] convert() was renamed _validate()')(_validate) + +# format and output 
functions ################################################## def comment(string): """return string as a comment""" @@ -401,6 +413,7 @@ def rest_format_section(stream, section, options, encoding=None, doc=None): print >> stream, '' print >> stream, ' Default: ``%s``' % value.replace("`` ", "```` ``") +# Options Manager ############################################################## class OptionsManagerMixIn(object): """MixIn to handle a configuration from both a configuration file and @@ -425,7 +438,7 @@ def reset_parsers(self, usage='', version=None): # configuration file parser self.cfgfile_parser = ConfigParser() # command line parser - self.cmdline_parser = optparse.OptionParser(usage=usage, version=version) + self.cmdline_parser = optik_ext.OptionParser(usage=usage, version=version) self.cmdline_parser.options_manager = self self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS) @@ -461,7 +474,7 @@ def add_option_group(self, group_name, doc, options, provider): if group_name in self._mygroups: group = self._mygroups[group_name] else: - group = optparse.OptionGroup(self.cmdline_parser, + group = optik_ext.OptionGroup(self.cmdline_parser, title=group_name.capitalize()) self.cmdline_parser.add_option_group(group) group.level = provider.level @@ -497,9 +510,9 @@ def optik_option(self, provider, opt, optdict): # default is handled here and *must not* be given to optik if you # want the whole machinery to work if 'default' in optdict: - if (optparse.OPTPARSE_FORMAT_DEFAULT and 'help' in optdict and - optdict.get('default') is not None and - not optdict['action'] in ('store_true', 'store_false')): + if ('help' in optdict + and optdict.get('default') is not None + and not optdict['action'] in ('store_true', 'store_false')): optdict['help'] += ' [current: %default]' del optdict['default'] args = ['--' + str(opt)] @@ -566,7 +579,7 @@ def generate_manpage(self, pkginfo, section=1, stream=None): """ self._monkeypatch_expand_default() try: - 
optparse.generate_manpage(self.cmdline_parser, pkginfo, + optik_ext.generate_manpage(self.cmdline_parser, pkginfo, section, stream=stream or sys.stdout, level=self._maxlevel) finally: @@ -686,7 +699,7 @@ def load_command_line_configuration(self, args=None): def add_help_section(self, title, description, level=0): """add a dummy option section for help purpose """ - group = optparse.OptionGroup(self.cmdline_parser, + group = optik_ext.OptionGroup(self.cmdline_parser, title=title.capitalize(), description=description) group.level = level @@ -694,18 +707,18 @@ def add_help_section(self, title, description, level=0): self.cmdline_parser.add_option_group(group) def _monkeypatch_expand_default(self): - # monkey patch optparse to deal with our default values + # monkey patch optik_ext to deal with our default values try: - self.__expand_default_backup = optparse.HelpFormatter.expand_default - optparse.HelpFormatter.expand_default = expand_default + self.__expand_default_backup = optik_ext.HelpFormatter.expand_default + optik_ext.HelpFormatter.expand_default = expand_default except AttributeError: # python < 2.4: nothing to be done pass def _unmonkeypatch_expand_default(self): # remove monkey patch - if hasattr(optparse.HelpFormatter, 'expand_default'): - # unpatch optparse to avoid side effects - optparse.HelpFormatter.expand_default = self.__expand_default_backup + if hasattr(optik_ext.HelpFormatter, 'expand_default'): + # unpatch optik_ext to avoid side effects + optik_ext.HelpFormatter.expand_default = self.__expand_default_backup def help(self, level=0): """return the usage string for available options """ @@ -734,6 +747,7 @@ def __call__(self, *args, **kwargs): assert self._inst, 'unbound method' return getattr(self._inst, self.method)(*args, **kwargs) +# Options Provider ############################################################# class OptionsProviderMixIn(object): """Mixin to provide options to an OptionsManager""" @@ -745,7 +759,7 @@ class 
OptionsProviderMixIn(object): level = 0 def __init__(self): - self.config = optparse.Values() + self.config = optik_ext.Values() for option in self.options: try: option, optdict = option @@ -777,41 +791,41 @@ def option_default(self, opt, optdict=None): default = default() return default - def option_name(self, opt, optdict=None): + def option_attrname(self, opt, optdict=None): """get the config attribute corresponding to opt """ if optdict is None: optdict = self.get_option_def(opt) return optdict.get('dest', opt.replace('-', '_')) + option_name = deprecated('[0.60] OptionsProviderMixIn.option_name() was renamed to option_attrname()')(option_attrname) def option_value(self, opt): """get the current value for the given option""" - return getattr(self.config, self.option_name(opt), None) + return getattr(self.config, self.option_attrname(opt), None) def set_option(self, opt, value, action=None, optdict=None): """method called to set an option (registered in the options list) """ - # print "************ setting option", opt," to value", value if optdict is None: optdict = self.get_option_def(opt) if value is not None: - value = convert(value, optdict, opt) + value = _validate(value, optdict, opt) if action is None: action = optdict.get('action', 'store') if optdict.get('type') == 'named': # XXX need specific handling - optname = self.option_name(opt, optdict) + optname = self.option_attrname(opt, optdict) currentvalue = getattr(self.config, optname, None) if currentvalue: currentvalue.update(value) value = currentvalue if action == 'store': - setattr(self.config, self.option_name(opt, optdict), value) + setattr(self.config, self.option_attrname(opt, optdict), value) elif action in ('store_true', 'count'): - setattr(self.config, self.option_name(opt, optdict), 0) + setattr(self.config, self.option_attrname(opt, optdict), 0) elif action == 'store_false': - setattr(self.config, self.option_name(opt, optdict), 1) + setattr(self.config, self.option_attrname(opt, optdict), 
1) elif action == 'append': - opt = self.option_name(opt, optdict) + opt = self.option_attrname(opt, optdict) _list = getattr(self.config, opt, None) if _list is None: if isinstance(value, (list, tuple)): @@ -893,6 +907,7 @@ def options_and_values(self, options=None): for optname, optdict in options: yield (optname, optdict, self.option_value(optname)) +# configuration ################################################################ class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn): """basic mixin for simple configurations which don't need the @@ -913,7 +928,7 @@ def __init__(self, *args, **kwargs): continue if not gdef in self.option_groups: self.option_groups.append(gdef) - self.register_options_provider(self, own_group=0) + self.register_options_provider(self, own_group=False) def register_options(self, options): """add some options to the configuration""" @@ -932,8 +947,8 @@ def __iter__(self): def __getitem__(self, key): try: - return getattr(self.config, self.option_name(key)) - except (optparse.OptionValueError, AttributeError): + return getattr(self.config, self.option_attrname(key)) + except (optik_ext.OptionValueError, AttributeError): raise KeyError(key) def __setitem__(self, key, value): @@ -941,7 +956,7 @@ def __setitem__(self, key, value): def get(self, key, default=None): try: - return getattr(self.config, self.option_name(key)) + return getattr(self.config, self.option_attrname(key)) except (OptionError, AttributeError): return default @@ -977,20 +992,21 @@ def __getattr__(self, key): def __getitem__(self, key): provider = self.config._all_options[key] try: - return getattr(provider.config, provider.option_name(key)) + return getattr(provider.config, provider.option_attrname(key)) except AttributeError: raise KeyError(key) def __setitem__(self, key, value): - self.config.global_set_option(self.config.option_name(key), value) + self.config.global_set_option(self.config.option_attrname(key), value) def get(self, key, default=None): 
provider = self.config._all_options[key] try: - return getattr(provider.config, provider.option_name(key)) + return getattr(provider.config, provider.option_attrname(key)) except AttributeError: return default +# other functions ############################################################## def read_old_config(newconfig, changes, configfile): """initialize newconfig from a deprecated configuration file @@ -1055,8 +1071,13 @@ def read_old_config(newconfig, changes, configfile): newconfig.set_option(optname, oldconfig[optname], optdict=optdef) -def merge_options(options): - """preprocess options to remove duplicate""" +def merge_options(options, optgroup=None): + """preprocess a list of options and remove duplicates, returning a new list + (tuple actually) of options. + + Options dictionaries are copied to avoid later side-effect. Also, if + `otpgroup` argument is specified, ensure all options are in the given group. + """ alloptions = {} options = list(options) for i in range(len(options)-1, -1, -1): @@ -1065,5 +1086,9 @@ def merge_options(options): options.pop(i) alloptions[optname].update(optdict) else: + optdict = optdict.copy() + options[i] = (optname, optdict) alloptions[optname] = optdict + if optgroup is not None: + alloptions[optname]['group'] = optgroup return tuple(options) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/decorators.py similarity index 95% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/decorators.py index 84ea90ba..34bbd3a9 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/decorators.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/decorators.py @@ -1,4 +1,4 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of logilab-common. @@ -19,9 +19,10 @@ __docformat__ = "restructuredtext en" import sys +import types from time import clock, time -from .compat import callable, method_type +from logilab.common.compat import callable, method_type # XXX rewrite so we can use the decorator syntax when keyarg has to be specified @@ -249,7 +250,9 @@ def wrapper(self, *args, **kwargs): def monkeypatch(klass, methodname=None): - """Decorator extending class with the decorated callable + """Decorator extending class with the decorated callable. This is basically + a syntactic sugar vs class assignment. + >>> class A: ... pass >>> @monkeypatch(A) @@ -273,11 +276,6 @@ def decorator(func): raise AttributeError('%s has no __name__ attribute: ' 'you should provide an explicit `methodname`' % func) - if callable(func) and sys.version_info < (3, 0): - setattr(klass, name, method_type(func, None, klass)) - else: - # likely a property - # this is quite borderline but usage already in the wild ... - setattr(klass, name, func) + setattr(klass, name, func) return func return decorator diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py new file mode 100644 index 00000000..c5685ec2 --- /dev/null +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py @@ -0,0 +1,188 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. 
+# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Deprecation utilities.""" + +__docformat__ = "restructuredtext en" + +import sys +from warnings import warn + +from logilab.common.changelog import Version + + +class DeprecationWrapper(object): + """proxy to print a warning on access to any attribute of the wrapped object + """ + def __init__(self, proxied, msg=None): + self._proxied = proxied + self._msg = msg + + def __getattr__(self, attr): + warn(self._msg, DeprecationWarning, stacklevel=2) + return getattr(self._proxied, attr) + + def __setattr__(self, attr, value): + if attr in ('_proxied', '_msg'): + self.__dict__[attr] = value + else: + warn(self._msg, DeprecationWarning, stacklevel=2) + setattr(self._proxied, attr, value) + + +class DeprecationManager(object): + """Manage the deprecation message handling. Messages are dropped for + versions more recent than the 'compatible' version. Example:: + + deprecator = deprecation.DeprecationManager("module_name") + deprecator.compatibility('1.3') + + deprecator.warn('1.2', "message.") + + @deprecator.deprecated('1.2', 'Message') + def any_func(): + pass + + class AnyClass(object): + __metaclass__ = deprecator.class_deprecated('1.2') + """ + def __init__(self, module_name=None): + """ + """ + self.module_name = module_name + self.compatible_version = None + + def compatibility(self, compatible_version): + """Set the compatible version. + """ + self.compatible_version = Version(compatible_version) + + def deprecated(self, version=None, reason=None, stacklevel=2, name=None, doc=None): + """Display a deprecation message only if the version is older than the + compatible version. 
+ """ + def decorator(func): + message = reason or 'The function "%s" is deprecated' + if '%s' in message: + message %= func.func_name + def wrapped(*args, **kwargs): + self.warn(version, message, stacklevel) + return func(*args, **kwargs) + return wrapped + return decorator + + def class_deprecated(self, version=None): + class metaclass(type): + """metaclass to print a warning on instantiation of a deprecated class""" + + def __call__(cls, *args, **kwargs): + msg = getattr(cls, "__deprecation_warning__", + "%(cls)s is deprecated") % {'cls': cls.__name__} + self.warn(version, msg) + return type.__call__(cls, *args, **kwargs) + return metaclass + + def moved(self, version, modpath, objname): + """use to tell that a callable has been moved to a new module. + + It returns a callable wrapper, so that when its called a warning is printed + telling where the object can be found, import is done (and not before) and + the actual object is called. + + NOTE: the usage is somewhat limited on classes since it will fail if the + wrapper is use in a class ancestors list, use the `class_moved` function + instead (which has no lazy import feature though). 
+ """ + def callnew(*args, **kwargs): + from logilab.common.modutils import load_module_from_name + message = "object %s has been moved to module %s" % (objname, modpath) + self.warn(version, message) + m = load_module_from_name(modpath) + return getattr(m, objname)(*args, **kwargs) + return callnew + + def class_renamed(self, version, old_name, new_class, message=None): + clsdict = {} + if message is None: + message = '%s is deprecated, use %s' % (old_name, new_class.__name__) + clsdict['__deprecation_warning__'] = message + try: + # new-style class + return self.class_deprecated(version)(old_name, (new_class,), clsdict) + except (NameError, TypeError): + # old-style class + class DeprecatedClass(new_class): + """FIXME: There might be a better way to handle old/new-style class + """ + def __init__(self, *args, **kwargs): + self.warn(version, message) + new_class.__init__(self, *args, **kwargs) + return DeprecatedClass + + def class_moved(self, version, new_class, old_name=None, message=None): + """nice wrapper around class_renamed when a class has been moved into + another module + """ + if old_name is None: + old_name = new_class.__name__ + if message is None: + message = 'class %s is now available as %s.%s' % ( + old_name, new_class.__module__, new_class.__name__) + return self.class_renamed(version, old_name, new_class, message) + + def warn(self, version=None, reason="", stacklevel=2): + """Display a deprecation message only if the version is older than the + compatible version. 
+ """ + if (self.compatible_version is None + or version is None + or Version(version) < self.compatible_version): + if self.module_name and version: + reason = '[%s %s] %s' % (self.module_name, version, reason) + elif self.module_name: + reason = '[%s] %s' % (self.module_name, reason) + elif version: + reason = '[%s] %s' % (version, reason) + warn(reason, DeprecationWarning, stacklevel=stacklevel) + +_defaultdeprecator = DeprecationManager() + +def deprecated(reason=None, stacklevel=2, name=None, doc=None): + return _defaultdeprecator.deprecated(None, reason, stacklevel, name, doc) + +class_deprecated = _defaultdeprecator.class_deprecated() + +def moved(modpath, objname): + return _defaultdeprecator.moved(None, modpath, objname) +moved.__doc__ = _defaultdeprecator.moved.__doc__ + +def class_renamed(old_name, new_class, message=None): + """automatically creates a class which fires a DeprecationWarning + when instantiated. + + >>> Set = class_renamed('Set', set, 'Set is now replaced by set') + >>> s = Set() + sample.py:57: DeprecationWarning: Set is now replaced by set + s = Set() + >>> + """ + return _defaultdeprecator.class_renamed(None, old_name, new_class, message) + +def class_moved(new_class, old_name=None, message=None): + return _defaultdeprecator.class_moved(None, new_class, old_name, message) +class_moved.__doc__ = _defaultdeprecator.class_moved.__doc__ + diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py similarity index 95% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py index 0bb9f38e..94a71b6e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/graph.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py @@ -28,7 +28,7 @@ import os import sys import tempfile -from .compat import str_encode +import codecs def 
escape(value): """Make usable in a dot file.""" @@ -106,8 +106,8 @@ def generate(self, outputfile=None, dotfile=None, mapfile=None): ppng, outputfile = tempfile.mkstemp(".png", name) os.close(pdot) os.close(ppng) - pdot = open(dot_sourcepath, 'w') - pdot.write(str_encode(self.source, 'utf8')) + pdot = codecs.open(dot_sourcepath, 'w', encoding='utf8') + pdot.write(self.source) pdot.close() if target != 'dot': if sys.platform == 'win32': @@ -226,10 +226,10 @@ def get_cycles(graph_dict, vertices=None): if vertices is None: vertices = graph_dict.keys() for vertice in vertices: - _get_cycles(graph_dict, vertice, [], result) + _get_cycles(graph_dict, [], set(), result, vertice) return result -def _get_cycles(graph_dict, vertice=None, path=None, result=None): +def _get_cycles(graph_dict, path, visited, result, vertice): """recursive function doing the real work for get_cycles""" if vertice in path: cycle = [vertice] @@ -248,7 +248,10 @@ def _get_cycles(graph_dict, vertice=None, path=None, result=None): path.append(vertice) try: for node in graph_dict[vertice]: - _get_cycles(graph_dict, node, path, result) + # don't check already visited nodes again + if node not in visited: + _get_cycles(graph_dict, path, visited, result, node) + visited.add(node) except KeyError: pass path.pop() diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/interface.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/interface.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/interface.py diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py similarity index 90% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py index ef707b23..9d0bb495 100644 --- 
a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/modutils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of logilab-common. @@ -34,6 +34,7 @@ from os.path import splitext, join, abspath, isdir, dirname, exists, basename from imp import find_module, load_module, C_BUILTIN, PY_COMPILED, PKG_DIRECTORY from distutils.sysconfig import get_config_var, get_python_lib, get_python_version +from distutils.errors import DistutilsPlatformError try: import zipimport @@ -42,7 +43,7 @@ ZIPFILE = object() -from . import STD_BLACKLIST, _handle_blacklist +from logilab.common import STD_BLACKLIST, _handle_blacklist # Notes about STD_LIB_DIR # Consider arch-specific installation for STD_LIB_DIR definition @@ -53,12 +54,18 @@ if sys.platform.startswith('win'): PY_SOURCE_EXTS = ('py', 'pyw') PY_COMPILED_EXTS = ('dll', 'pyd') - STD_LIB_DIR = get_python_lib(standard_lib=1) else: PY_SOURCE_EXTS = ('py',) PY_COMPILED_EXTS = ('so',) - # extend lib dir with some arch-dependant paths - STD_LIB_DIR = join(get_config_var("LIBDIR"), "python%s" % get_python_version()) + +try: + STD_LIB_DIR = get_python_lib(standard_lib=1) +# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to +# non-valid path, see https://bugs.pypy.org/issue1164 +except DistutilsPlatformError: + STD_LIB_DIR = '//' + +EXT_LIB_DIR = get_python_lib() BUILTIN_MODULES = dict(zip(sys.builtin_module_names, [1]*len(sys.builtin_module_names))) @@ -151,6 +158,9 @@ def load_module_from_modpath(parts, path=None, use_sys=1): if len(modpath) != len(parts): # even with use_sys=False, should try to get outer packages from sys.modules module = sys.modules.get(curname) + elif use_sys: + # because it may have been 
indirectly loaded through a parent + module = sys.modules.get(curname) if module is None: mp_file, mp_filename, mp_desc = find_module(part, path) module = load_module(curname, mp_file, mp_filename, mp_desc) @@ -230,10 +240,7 @@ def modpath_from_file(filename, extrapath=None): return extrapath[path_].split('.') + submodpath for path in sys.path: path = abspath(path) - if path and base[:len(path)] == path: - if filename.find('site-packages') != -1 and \ - path.find('site-packages') == -1: - continue + if path and base.startswith(path): modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg] if _check_init(path, modpath[:-1]): return modpath @@ -493,13 +500,11 @@ def is_standard_module(modname, std_path=(STD_LIB_DIR,)): if filename is None: return 1 filename = abspath(filename) + if filename.startswith(EXT_LIB_DIR): + return 0 for path in std_path: - path = abspath(path) - if filename.startswith(path): - pfx_len = len(path) - if filename[pfx_len+1:pfx_len+14] != 'site-packages': - return 1 - return 0 + if filename.startswith(abspath(path)): + return 1 return False @@ -565,10 +570,15 @@ def _search_zip(modpath, pic): if importer.find_module(modpath[0]): if not importer.find_module('/'.join(modpath)): raise ImportError('No module named %s in %s/%s' % ( - '.'.join(modpath[1:]), file, modpath)) + '.'.join(modpath[1:]), filepath, modpath)) return ZIPFILE, abspath(filepath) + '/' + '/'.join(modpath), filepath raise ImportError('No module named %s' % '.'.join(modpath)) +try: + import pkg_resources +except ImportError: + pkg_resources = None + def _module_file(modpath, path=None): """get a module type / file path @@ -599,16 +609,32 @@ def _module_file(modpath, path=None): checkeggs = True except AttributeError: checkeggs = False + # pkg_resources support (aka setuptools namespace packages) + if pkg_resources is not None and modpath[0] in pkg_resources._namespace_packages and len(modpath) > 1: + # setuptools has added into sys.modules a module object with proper + # 
__path__, get back information from there + module = sys.modules[modpath.pop(0)] + path = module.__path__ imported = [] while modpath: + modname = modpath[0] + # take care to changes in find_module implementation wrt builtin modules + # + # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23) + # >>> imp.find_module('posix') + # (None, 'posix', ('', '', 6)) + # + # Python 3.3.1 (default, Apr 26 2013, 12:08:46) + # >>> imp.find_module('posix') + # (None, None, ('', '', 6)) try: - _, mp_filename, mp_desc = find_module(modpath[0], path) + _, mp_filename, mp_desc = find_module(modname, path) except ImportError: if checkeggs: return _search_zip(modpath, pic)[:2] raise else: - if checkeggs: + if checkeggs and mp_filename: fullabspath = [abspath(x) for x in _path] try: pathindex = fullabspath.index(dirname(abspath(mp_filename))) @@ -628,7 +654,16 @@ def _module_file(modpath, path=None): if mtype != PKG_DIRECTORY: raise ImportError('No module %s in %s' % ('.'.join(modpath), '.'.join(imported))) - path = [mp_filename] + # XXX guess if package is using pkgutil.extend_path by looking for + # those keywords in the first four Kbytes + data = open(join(mp_filename, '__init__.py')).read(4096) + if 'pkgutil' in data and 'extend_path' in data: + # extend_path is called, search sys.path for module/packages of this name + # see pkgutil.extend_path documentation + path = [join(p, modname) for p in sys.path + if isdir(join(p, modname))] + else: + path = [mp_filename] return mtype, mp_filename def _is_python_file(filename): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/optik_ext.py similarity index 97% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/optik_ext.py index e65d8946..49d685b1 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/optik_ext.py +++ 
b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/optik_ext.py @@ -65,10 +65,7 @@ except ImportError: HAS_MX_DATETIME = False - -OPTPARSE_FORMAT_DEFAULT = sys.version_info >= (2, 4) - -from .textutils import splitstrip +from logilab.common.textutils import splitstrip def check_regexp(option, opt, value): """check a regexp value by trying to compile it @@ -168,13 +165,13 @@ def check_color(option, opt, value): raise OptionValueError(msg % (opt, value)) def check_time(option, opt, value): - from .textutils import TIME_UNITS, apply_units + from logilab.common.textutils import TIME_UNITS, apply_units if isinstance(value, (int, long, float)): return value return apply_units(value, TIME_UNITS) def check_bytes(option, opt, value): - from .textutils import BYTE_UNITS, apply_units + from logilab.common.textutils import BYTE_UNITS, apply_units if hasattr(value, '__int__'): return value return apply_units(value, BYTE_UNITS) @@ -227,10 +224,7 @@ def _check_choice(self): def process(self, opt, value, values, parser): # First, convert the value(s) to the right type. Howl if any # value(s) are bogus. 
- try: - value = self.convert_value(opt, value) - except AttributeError: # py < 2.4 - value = self.check_value(opt, value) + value = self.convert_value(opt, value) if self.type == 'named': existant = getattr(values, self.dest) if existant: diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/textutils.py similarity index 99% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/textutils.py index 1d77a4ce..f55c0040 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/textutils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/textutils.py @@ -53,7 +53,7 @@ except ImportError: linesep = '\n' # gae -from .deprecation import deprecated +from logilab.common.deprecation import deprecated MANUAL_UNICODE_MAP = { u'\xa1': u'!', # INVERTED EXCLAMATION MARK diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/tree.py similarity index 98% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/tree.py index 60698d62..885eb0fa 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/tree.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/tree.py @@ -25,8 +25,8 @@ import sys -from . 
import flatten -from .visitor import VisitedMixIn, FilteredIterator, no_filter +from logilab.common import flatten +from logilab.common.visitor import VisitedMixIn, FilteredIterator, no_filter ## Exceptions ################################################################# diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/__init__.py similarity index 96% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/__init__.py index 120d0911..dcffcfa3 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/__init__.py @@ -27,7 +27,7 @@ from cStringIO import StringIO from StringIO import StringIO as UStringIO -from ..textutils import linesep +from logilab.common.textutils import linesep def get_nodes(node, klass): @@ -169,6 +169,6 @@ def writeln(data=''): del self.writeln -from .nodes import * -from .text_writer import TextWriter -from .html_writer import HTMLWriter +from logilab.common.ureports.nodes import * +from logilab.common.ureports.text_writer import TextWriter +from logilab.common.ureports.html_writer import HTMLWriter diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/docbook_writer.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/docbook_writer.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/docbook_writer.py diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/html_writer.py similarity index 99% rename from 
pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/html_writer.py index 0f70e8fb..1d095034 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/html_writer.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/html_writer.py @@ -20,7 +20,7 @@ from cgi import escape -from . import BaseWriter +from logilab.common.ureports import BaseWriter class HTMLWriter(BaseWriter): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/nodes.py similarity index 99% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/nodes.py index a4470dc4..d63b5828 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/nodes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/nodes.py @@ -21,7 +21,7 @@ """ __docformat__ = "restructuredtext en" -from ..tree import VNode +from logilab.common.tree import VNode class BaseComponent(VNode): """base report component diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/text_writer.py similarity index 98% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/text_writer.py index c8c09e1c..04c8f263 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/ureports/text_writer.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/text_writer.py @@ -18,8 +18,8 @@ """Text formatting drivers for ureports""" __docformat__ = "restructuredtext en" -from ..textutils import linesep -from . 
import BaseWriter +from logilab.common.textutils import linesep +from logilab.common.ureports import BaseWriter TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^'] diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/visitor.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/visitor.py rename to pymode/libs/pylama/lint/pylama_pylint/logilab/common/visitor.py diff --git a/pymode/libs/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py index 6132c2e8..20bebf7c 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/main.py +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py @@ -1,9 +1,12 @@ """ Pylint support. """ - from os import path as op, environ from pylama.lint import Linter as BaseLinter # noqa +from astroid import MANAGER # noqa +from pylint.lint import Run # noqa +from pylint.reporters import BaseReporter # noqa + PYLINT_RC = op.abspath(op.join(op.dirname(__file__), 'pylint.rc')) @@ -20,10 +23,6 @@ def run(path, **meta): # noqa """ - from .pylint.lint import Run - from .pylint.reporters import BaseReporter - from .pylint.astroid import MANAGER - MANAGER.astroid_cache.clear() class Reporter(BaseReporter): @@ -53,3 +52,4 @@ def add_message(self, msg_id, location, msg): runner = Run( [path] + attrs, reporter=Reporter(), exit=False) return runner.linter.reporter.errors + diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py index 997b9a59..614828ee 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py @@ -30,18 +30,18 @@ author = 'Logilab' author_email = 'python-projects@lists.logilab.org' -classifiers = ['Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU General 
Public License (GPL)', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 3', - 'Topic :: Software Development :: Debuggers', - 'Topic :: Software Development :: Quality Assurance', - 'Topic :: Software Development :: Testing', - ] +classifiers = ['Development Status :: 4 - Beta', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: GNU General Public License (GPL)', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 3', + 'Topic :: Software Development :: Debuggers', + 'Topic :: Software Development :: Quality Assurance', + 'Topic :: Software Development :: Testing', + ] long_desc = """\ diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py index dddc1bab..27dc3645 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py @@ -43,11 +43,11 @@ import warnings from os.path import dirname -from ..astroid.utils import ASTWalker -from ..logilab.common.configuration import OptionsProviderMixIn +from astroid.utils import ASTWalker +from logilab.common.configuration import OptionsProviderMixIn -from ..reporters import diff_string -from ..utils import register_plugins +from pylint.reporters import diff_string +from pylint.utils import register_plugins def table_lines_from_stats(stats, old_stats, columns): """get values listed in from and , diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py index 5282898e..de40c4cb 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py @@ 
-17,15 +17,16 @@ """basic checker for Python code""" import sys -from .. import astroid -from ..logilab.common.ureports import Table -from ..astroid import are_exclusive, bases - -from ..interfaces import IAstroidChecker -from ..utils import EmptyReport -from ..reporters import diff_string -from . import BaseChecker -from .utils import ( +import astroid +from logilab.common.ureports import Table +from astroid import are_exclusive +import astroid.bases + +from pylint.interfaces import IAstroidChecker +from pylint.utils import EmptyReport +from pylint.reporters import diff_string +from pylint.checkers import BaseChecker +from pylint.checkers.utils import ( check_messages, clobber_in_except, is_builtin_object, @@ -37,7 +38,6 @@ import re - # regex for class/function/variable/constant name CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$') MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$') @@ -541,7 +541,7 @@ def visit_function(self, node): value = default.infer().next() except astroid.InferenceError: continue - builtins = bases.BUILTINS + builtins = astroid.bases.BUILTINS if (isinstance(value, astroid.Instance) and value.qname() in ['.'.join([builtins, x]) for x in ('set', 'dict', 'list')]): if value is default: diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py index ecacecea..fd761463 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py @@ -16,15 +16,21 @@ """classes checker for Python code """ from __future__ import generators - -from .. import astroid -from ..astroid import YES, Instance, are_exclusive, AssAttr - -from ..interfaces import IAstroidChecker -from . 
import BaseChecker -from .utils import (PYMETHODS, overrides_a_method, +import sys +import astroid +from astroid import YES, Instance, are_exclusive, AssAttr +from astroid.bases import Generator + +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import (PYMETHODS, overrides_a_method, check_messages, is_attr_private, is_attr_protected, node_frame_class) +if sys.version_info >= (3, 0): + NEXT_METHOD = '__next__' +else: + NEXT_METHOD = 'next' + def class_is_abstract(node): """return true if the given class node should be considered as an abstract class @@ -142,6 +148,16 @@ class implementing this interface'), 'non-parent-init-called', 'Used when an __init__ method is called on a class which is not \ in the direct ancestors for the analysed class.'), + 'W0234': ('__iter__ returns non-iterator', + 'non-iterator-returned', + 'Used when an __iter__ method returns something which is not an \ + iterable (i.e. has no `%s` method)' % NEXT_METHOD), + 'E0235': ('__exit__ must accept 3 arguments: type, value, traceback', + 'bad-context-manager', + 'Used when the __exit__ special method, belonging to a \ + context manager, does not accept 3 arguments \ + (type, value, traceback).') + } @@ -311,6 +327,39 @@ def visit_function(self, node): except astroid.NotFoundError: pass + # check non-iterators in __iter__ + if node.name == '__iter__': + self._check_iter(node) + elif node.name == '__exit__': + self._check_exit(node) + + def _check_iter(self, node): + try: + infered = node.infer_call_result(node) + except astroid.InferenceError: + return + + for infered_node in infered: + if (infered_node is YES + or isinstance(infered_node, Generator)): + continue + if isinstance(infered_node, astroid.Instance): + try: + infered_node.local_attr(NEXT_METHOD) + except astroid.NotFoundError: + self.add_message('non-iterator-returned', + node=node) + break + + def _check_exit(self, node): + positional = sum(1 for arg in 
node.args.args if arg.name != 'self') + if positional < 3 and not node.args.vararg: + self.add_message('bad-context-manager', + node=node) + elif positional > 3: + self.add_message('bad-context-manager', + node=node) + def leave_function(self, node): """on method node, check if this method couldn't be a function diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py index f81efc37..f3b58821 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py @@ -15,11 +15,11 @@ # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. """check for signs of poor design""" -from ..astroid import Function, If, InferenceError +from astroid import Function, If, InferenceError -from ..interfaces import IAstroidChecker -from . import BaseChecker -from .utils import check_messages +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import check_messages import re diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py index 3031075f..8ac00a5f 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py @@ -16,14 +16,14 @@ """ import sys -from ..logilab.common.compat import builtins +from logilab.common.compat import builtins BUILTINS_NAME = builtins.__name__ -from .. import astroid -from ..astroid import YES, Instance, unpack_infer +import astroid +from astroid import YES, Instance, unpack_infer -from . 
import BaseChecker -from .utils import is_empty, is_raising, check_messages -from ..interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import is_empty, is_raising, check_messages +from pylint.interfaces import IAstroidChecker OVERGENERAL_EXCEPTIONS = ('Exception',) @@ -46,7 +46,11 @@ 'notimplemented-raised', 'Used when NotImplemented is raised instead of \ NotImplementedError'), - + 'E0712': ('Catching an exception which doesn\'t inherit from BaseException: %s', + 'catching-non-exception', + 'Used when a class which doesn\'t inherit from \ + BaseException is used as an exception in an except clause.'), + 'W0701': ('Raising a string exception', 'raising-string', 'Used when a string exception is raised.'), @@ -160,13 +164,14 @@ def _check_raise_value(self, node, expr): value_found = False return value_found - @check_messages('W0712') def visit_excepthandler(self, node): """Visit an except handler block and check for exception unpacking.""" if isinstance(node.name, (astroid.Tuple, astroid.List)): self.add_message('W0712', node=node) - @check_messages('W0702', 'W0703', 'W0704', 'W0711', 'E0701') + + + @check_messages('W0702', 'W0703', 'W0704', 'W0711', 'E0701', 'catching-non-exception') def visit_tryexcept(self, node): """check for empty except""" exceptions_classes = [] @@ -206,6 +211,13 @@ def visit_tryexcept(self, node): and exc.root().name == EXCEPTIONS_MODULE and nb_handlers == 1 and not is_raising(handler.body)): self.add_message('W0703', args=exc.name, node=handler.type) + + if (not inherit_from_std_ex(exc) and + exc.root().name != BUILTINS_NAME): + self.add_message('catching-non-exception', + node=handler.type, + args=(exc.name, )) + exceptions_classes += excs diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py index bf53234a..f307f33d 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py +++ 
b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py @@ -21,18 +21,40 @@ Some parts of the process_token method is based from The Tab Nanny std module. """ -import re, sys +import keyword +import sys import tokenize + if not hasattr(tokenize, 'NL'): raise ValueError("tokenize.NL doesn't exist -- tokenize module too old") -from ..logilab.common.textutils import pretty_match -from ..astroid import nodes +from astroid import nodes + +from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker +from pylint.checkers import BaseTokenChecker +from pylint.checkers.utils import check_messages +from pylint.utils import WarningScope, OPTION_RGX + +_KEYWORD_TOKENS = ['assert', 'del', 'elif', 'except', 'for', 'if', 'in', 'not', + 'print', 'raise', 'return', 'while', 'yield'] + +_SPACED_OPERATORS = ['==', '<', '>', '!=', '<>', '<=', '>=', + '+=', '-=', '*=', '**=', '/=', '//=', '&=', '|=', '^=', + '%=', '>>=', '<<='] +_OPENING_BRACKETS = ['(', '[', '{'] +_CLOSING_BRACKETS = [')', ']', '}'] + +_EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT]) -from ..interfaces import ITokenChecker, IAstroidChecker -from . 
import BaseTokenChecker -from .utils import check_messages -from ..utils import WarningScope, OPTION_RGX +# Whitespace checking policy constants +_MUST = 0 +_MUST_NOT = 1 +_IGNORE = 2 + +# Whitespace checking config constants +_DICT_SEPARATOR = 'dict-separator' +_TRAILING_COMMA = 'trailing-comma' +_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR] MSGS = { 'C0301': ('Line too long (%s/%s)', @@ -64,22 +86,20 @@ 'multiple-statements', 'Used when more than on statement are found on the same line.', {'scope': WarningScope.NODE}), - 'C0322': ('Operator not preceded by a space\n%s', - 'no-space-before-operator', - 'Used when one of the following operator (!= | <= | == | >= | < ' - '| > | = | \\+= | -= | \\*= | /= | %) is not preceded by a space.', - {'scope': WarningScope.NODE}), - 'C0323': ('Operator not followed by a space\n%s', - 'no-space-after-operator', - 'Used when one of the following operator (!= | <= | == | >= | < ' - '| > | = | \\+= | -= | \\*= | /= | %) is not followed by a space.', - {'scope': WarningScope.NODE}), - 'C0324': ('Comma not followed by a space\n%s', - 'no-space-after-comma', - 'Used when a comma (",") is not followed by a space.', - {'scope': WarningScope.NODE}), + 'C0325' : ('Unnecessary parens after %r keyword', + 'superfluous-parens', + 'Used when a single item in parentheses follows an if, for, or ' + 'other keyword.'), + 'C0326': ('%s space %s %s %s\n%s', + 'bad-whitespace', + ('Used when a wrong number of spaces is used around an operator, ' + 'bracket or block opener.'), + {'old_names': [('C0323', 'no-space-after-operator'), + ('C0324', 'no-space-after-comma'), + ('C0322', 'no-space-before-operator')]}) } + if sys.version_info < (3, 0): MSGS.update({ @@ -99,74 +119,21 @@ {'scope': WarningScope.NODE}), }) -# simple quoted string rgx -SQSTRING_RGX = r'"([^"\\]|\\.)*?"' -# simple apostrophed rgx -SASTRING_RGX = r"'([^'\\]|\\.)*?'" -# triple quoted string rgx -TQSTRING_RGX = r'"""([^"]|("(?!"")))*?(""")' -# triple apostrophe'd 
string rgx -TASTRING_RGX = r"'''([^']|('(?!'')))*?(''')" - -# finally, the string regular expression -STRING_RGX = re.compile('(%s)|(%s)|(%s)|(%s)' % (TQSTRING_RGX, TASTRING_RGX, - SQSTRING_RGX, SASTRING_RGX), - re.MULTILINE|re.DOTALL) - -COMMENT_RGX = re.compile("#.*$", re.M) - -OPERATORS = r'!=|<=|==|>=|<|>|=|\+=|-=|\*=|/=|%' - -OP_RGX_MATCH_1 = r'[^(]*(?|=|\+|-|\*|/|!|%%|&|\|)(%s).*' % OPERATORS -OP_RGX_SEARCH_1 = r'(?|=|\+|-|\*|/|!|%%|&|\|)(%s)' % OPERATORS - -OP_RGX_MATCH_2 = r'[^(]*(%s)(?!\s|=|>|<).*' % OPERATORS -OP_RGX_SEARCH_2 = r'(%s)(?!\s|=|>)' % OPERATORS - -BAD_CONSTRUCT_RGXS = ( - - (re.compile(OP_RGX_MATCH_1, re.M), - re.compile(OP_RGX_SEARCH_1, re.M), - 'C0322'), - (re.compile(OP_RGX_MATCH_2, re.M), - re.compile(OP_RGX_SEARCH_2, re.M), - 'C0323'), +def _underline_token(token): + length = token[3][1] - token[2][1] + offset = token[2][1] + return token[4] + (' ' * offset) + ('^' * length) - (re.compile(r'.*,[^(\s|\]|}|\))].*', re.M), - re.compile(r',[^\s)]', re.M), - 'C0324'), - ) - -def get_string_coords(line): - """return a list of string positions (tuple (start, end)) in the line - """ - result = [] - for match in re.finditer(STRING_RGX, line): - result.append( (match.start(), match.end()) ) - return result - -def in_coords(match, string_coords): - """return true if the match is in the string coord""" - mstart = match.start() - for start, end in string_coords: - if mstart >= start and mstart < end: - return True - return False - -def check_line(line): - """check a line for a bad construction - if it founds one, return a message describing the problem - else return None - """ - cleanstr = COMMENT_RGX.sub('', STRING_RGX.sub('', line)) - for rgx_match, rgx_search, msg_id in BAD_CONSTRUCT_RGXS: - if rgx_match.match(cleanstr): - string_positions = get_string_coords(line) - for match in re.finditer(rgx_search, line): - if not in_coords(match, string_positions): - return msg_id, pretty_match(match, line.rstrip()) +def _column_distance(token1, token2): + 
if token1 == token2: + return 0 + if token2[3] < token1[3]: + token1, token2 = token2, token1 + if token1[3][0] != token2[2][0]: + return None + return token2[2][1] - token1[3][1] class FormatChecker(BaseTokenChecker): @@ -177,7 +144,7 @@ class FormatChecker(BaseTokenChecker): * use of <> instead of != """ - __implements__ = (ITokenChecker, IAstroidChecker) + __implements__ = (ITokenChecker, IAstroidChecker, IRawChecker) # configuration section name name = 'format' @@ -193,6 +160,16 @@ class FormatChecker(BaseTokenChecker): 'default': r'^\s*(# )??$', 'help': ('Regexp for a line that is allowed to be longer than ' 'the limit.')}), + ('single-line-if-stmt', + {'default': False, 'type' : 'yn', 'metavar' : '', + 'help' : ('Allow the body of an if to be on the same ' + 'line as the test if there is no else.')}), + ('no-space-check', + {'default': ','.join(_NO_SPACE_CHECK_CHOICES), + 'type': 'multiple_choice', + 'choices': _NO_SPACE_CHECK_CHOICES, + 'help': ('List of optional constructs for which whitespace ' + 'checking is disabled')}), ('max-module-lines', {'default' : 1000, 'type' : 'int', 'metavar' : '', 'help': 'Maximum number of lines in a module'} @@ -213,6 +190,225 @@ def new_line(self, tok_type, line, line_num, junk): self._lines[line_num] = line.split('\n')[0] self.check_lines(line, line_num) + def process_module(self, module): + self._keywords_with_parens = set() + for node in module.body: + if (isinstance(node, nodes.From) and node.modname == '__future__' + and any(name == 'print_function' for name, _ in node.names)): + self._keywords_with_parens.add('print') + + def _check_keyword_parentheses(self, tokens, start): + """Check that there are not unnecessary parens after a keyword. + + Parens are unnecessary if there is exactly one balanced outer pair on a + line, and it is followed by a colon, and contains no commas (i.e. is not a + tuple). + + Args: + tokens: list of Tokens; the entire list of Tokens. 
+ start: int; the position of the keyword in the token list. + """ + # If the next token is not a paren, we're fine. + if tokens[start+1][1] != '(': + return + + found_comma = False + found_and_or = False + depth = 0 + keyword_token = tokens[start][1] + line_num = tokens[start][2][0] + + for i in xrange(start, len(tokens) - 1): + token = tokens[i] + + # If we hit a newline, then assume any parens were for continuation. + if token[0] == tokenize.NL: + return + + if token[1] == '(': + depth += 1 + elif token[1] == ')': + depth -= 1 + if not depth: + # ')' can't happen after if (foo), since it would be a syntax error. + if (tokens[i+1][1] in (':', ')', ']', '}', 'in') or + tokens[i+1][0] in (tokenize.NEWLINE, tokenize.ENDMARKER, + tokenize.COMMENT)): + # The empty tuple () is always accepted. + if i == start + 2: + return + if keyword_token == 'not': + if not found_and_or: + self.add_message('C0325', line=line_num, + args=keyword_token) + elif keyword_token in ('return', 'yield'): + self.add_message('C0325', line=line_num, + args=keyword_token) + elif keyword_token not in self._keywords_with_parens: + if not (tokens[i+1][1] == 'in' and found_and_or): + self.add_message('C0325', line=line_num, + args=keyword_token) + return + elif depth == 1: + # This is a tuple, which is always acceptable. + if token[1] == ',': + return + # 'and' and 'or' are the only boolean operators with lower precedence + # than 'not', so parens are only required when they are found. + elif token[1] in ('and', 'or'): + found_and_or = True + # A yield inside an expression must always be in parentheses, + # quit early without error. + elif token[1] == 'yield': + return + # A generator expression always has a 'for' token in it, and + # the 'for' token is only legal inside parens when it is in a + # generator expression. The parens are necessary here, so bail + # without an error. 
+ elif token[1] == 'for': + return + + def _opening_bracket(self, tokens, i): + self._bracket_stack.append(tokens[i][1]) + # Special case: ignore slices + if tokens[i][1] == '[' and tokens[i+1][1] == ':': + return + + if (i > 0 and (tokens[i-1][0] == tokenize.NAME and + not (keyword.iskeyword(tokens[i-1][1])) + or tokens[i-1][1] in _CLOSING_BRACKETS)): + self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT)) + else: + self._check_space(tokens, i, (_IGNORE, _MUST_NOT)) + + def _closing_bracket(self, tokens, i): + self._bracket_stack.pop() + # Special case: ignore slices + if tokens[i-1][1] == ':' and tokens[i][1] == ']': + return + policy_before = _MUST_NOT + if tokens[i][1] in _CLOSING_BRACKETS and tokens[i-1][1] == ',': + if _TRAILING_COMMA in self.config.no_space_check: + policy_before = _IGNORE + + self._check_space(tokens, i, (policy_before, _IGNORE)) + + def _check_equals_spacing(self, tokens, i): + """Check the spacing of a single equals sign.""" + if self._inside_brackets('(') or self._inside_brackets('lambda'): + self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT)) + else: + self._check_space(tokens, i, (_MUST, _MUST)) + + def _open_lambda(self, unused_tokens, unused_i): + self._bracket_stack.append('lambda') + + def _handle_colon(self, tokens, i): + # Special case: ignore slices + if self._inside_brackets('['): + return + if (self._inside_brackets('{') and + _DICT_SEPARATOR in self.config.no_space_check): + policy = (_IGNORE, _IGNORE) + else: + policy = (_MUST_NOT, _MUST) + self._check_space(tokens, i, policy) + + if self._inside_brackets('lambda'): + self._bracket_stack.pop() + + def _handle_comma(self, tokens, i): + # Only require a following whitespace if this is + # not a hanging comma before a closing bracket. 
+ if tokens[i+1][1] in _CLOSING_BRACKETS: + self._check_space(tokens, i, (_MUST_NOT, _IGNORE)) + else: + self._check_space(tokens, i, (_MUST_NOT, _MUST)) + + def _check_surrounded_by_space(self, tokens, i): + """Check that a binary operator is surrounded by exactly one space.""" + self._check_space(tokens, i, (_MUST, _MUST)) + + def _check_space(self, tokens, i, policies): + def _policy_string(policy): + if policy == _MUST: + return 'Exactly one', 'required' + else: + return 'No', 'allowed' + + def _name_construct(token): + if tokens[i][1] == ',': + return 'comma' + elif tokens[i][1] == ':': + return ':' + elif tokens[i][1] in '()[]{}': + return 'bracket' + elif tokens[i][1] in ('<', '>', '<=', '>=', '!='): + return 'comparison' + else: + if self._inside_brackets('('): + return 'keyword argument assignment' + else: + return 'assignment' + + good_space = [True, True] + pairs = [(tokens[i-1], tokens[i]), (tokens[i], tokens[i+1])] + + for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)): + current_idx = 1 - other_idx + if token_pair[other_idx][0] in _EOL or policy == _IGNORE: + continue + + distance = _column_distance(*token_pair) + if distance is None: + continue + good_space[other_idx] = ( + (policy == _MUST and distance == 1) or + (policy == _MUST_NOT and distance == 0)) + + warnings = [] + if not any(good_space) and policies[0] == policies[1]: + warnings.append((policies[0], 'around')) + else: + for ok, policy, position in zip(good_space, policies, ('before', 'after')): + if not ok: + warnings.append((policy, position)) + for policy, position in warnings: + construct = _name_construct(tokens[i]) + count, state = _policy_string(policy) + self.add_message('C0326', line=tokens[i][2][0], + args=(count, state, position, construct, + _underline_token(tokens[i]))) + + def _inside_brackets(self, left): + return self._bracket_stack[-1] == left + + def _prepare_token_dispatcher(self): + raw = [ + (_KEYWORD_TOKENS, + self._check_keyword_parentheses), + + 
(_OPENING_BRACKETS, self._opening_bracket), + + (_CLOSING_BRACKETS, self._closing_bracket), + + (['='], self._check_equals_spacing), + + (_SPACED_OPERATORS, self._check_surrounded_by_space), + + ([','], self._handle_comma), + + ([':'], self._handle_colon), + + (['lambda'], self._open_lambda), + ] + + dispatch = {} + for tokens, handler in raw: + for token in tokens: + dispatch[token] = handler + return dispatch + def process_tokens(self, tokens): """process tokens and search for : @@ -222,6 +418,7 @@ def process_tokens(self, tokens): _ optionally bad construct (if given, bad_construct must be a compiled regular expression). """ + self._bracket_stack = [None] indent = tokenize.INDENT dedent = tokenize.DEDENT newline = tokenize.NEWLINE @@ -233,7 +430,8 @@ def process_tokens(self, tokens): self._lines = {} self._visited_lines = {} new_line_delay = False - for (tok_type, token, start, _, line) in tokens: + token_handlers = self._prepare_token_dispatcher() + for idx, (tok_type, token, start, _, line) in enumerate(tokens): if new_line_delay: new_line_delay = False self.new_line(tok_type, line, line_num, junk) @@ -292,6 +490,13 @@ def process_tokens(self, tokens): check_equal = 0 self.check_indent_level(line, indents[-1], line_num) + try: + handler = token_handlers[token] + except KeyError: + pass + else: + handler(tokens, idx) + line_num -= 1 # to be ok with "wc -l" if line_num > self.config.max_module_lines: self.add_message('C0302', args=line_num, line=1) @@ -307,16 +512,19 @@ def visit_default(self, node): if prev_sibl is not None: prev_line = prev_sibl.fromlineno else: - prev_line = node.parent.statement().fromlineno + # The line on which a finally: occurs in a try/finally + # is not directly represented in the AST. 
We infer it + # by taking the last line of the body and adding 1, which + # should be the line of finally: + if (isinstance(node.parent, nodes.TryFinally) + and node in node.parent.finalbody): + prev_line = node.parent.body[0].tolineno + 1 + else: + prev_line = node.parent.statement().fromlineno line = node.fromlineno assert line, node if prev_line == line and self._visited_lines.get(line) != 2: - # py2.5 try: except: finally: - if not (isinstance(node, nodes.TryExcept) - and isinstance(node.parent, nodes.TryFinally) - and node.fromlineno == node.parent.fromlineno): - self.add_message('C0321', node=node) - self._visited_lines[line] = 2 + self._check_multi_statement_line(node, line) return if line in self._visited_lines: return @@ -332,13 +540,23 @@ def visit_default(self, node): lines.append(self._lines[line].rstrip()) except KeyError: lines.append('') - try: - msg_def = check_line('\n'.join(lines)) - if msg_def: - self.add_message(msg_def[0], node=node, args=msg_def[1]) - except KeyError: - # FIXME: internal error ! - pass + + def _check_multi_statement_line(self, node, line): + """Check for lines containing multiple statements.""" + # Do not warn about multiple nested context managers + # in with statements. + if isinstance(node, nodes.With): + return + # For try... except... finally..., the two nodes + # appear to be on the same line due to how the AST is built. 
+ if (isinstance(node, nodes.TryExcept) and + isinstance(node.parent, nodes.TryFinally)): + return + if (isinstance(node.parent, nodes.If) and not node.parent.orelse + and self.config.single_line_if_stmt): + return + self.add_message('C0321', node=node) + self._visited_lines[line] = 2 @check_messages('W0333') def visit_backquote(self, node): @@ -355,7 +573,7 @@ def check_lines(self, lines, i): self.add_message('C0304', line=i) else: stripped_line = line.rstrip() - if line != stripped_line + '\n': + if line[len(stripped_line):] not in ('\n', '\r\n'): self.add_message('C0303', line=i) # Don't count excess whitespace in the line length. line = stripped_line diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py index 3c321bbb..1dd77879 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py @@ -15,17 +15,17 @@ # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. """imports checkers for Python code""" -from ..logilab.common.graph import get_cycles, DotBackend -from ..logilab.common.modutils import is_standard_module -from ..logilab.common.ureports import VerbatimText, Paragraph +from logilab.common.graph import get_cycles, DotBackend +from logilab.common.modutils import is_standard_module +from logilab.common.ureports import VerbatimText, Paragraph -from .. import astroid -from ..astroid import are_exclusive +import astroid +from astroid import are_exclusive -from ..interfaces import IAstroidChecker -from ..utils import EmptyReport -from . 
import BaseChecker -from .utils import check_messages +from pylint.interfaces import IAstroidChecker +from pylint.utils import EmptyReport +from pylint.checkers import BaseChecker +from pylint.checkers.utils import check_messages def get_first_import(node, context, name, base, level): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py index 5f2381ca..6986ca4e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py @@ -14,9 +14,11 @@ """checker for use of Python logging """ -from .. import astroid -from . import BaseChecker, utils -from .. import interfaces +import astroid +from pylint import checkers +from pylint import interfaces +from pylint.checkers import utils +from pylint.checkers.utils import check_messages MSGS = { 'W1201': ('Specify string format arguments as logging function parameters', @@ -52,7 +54,7 @@ 'warning']) -class LoggingChecker(BaseChecker): +class LoggingChecker(checkers.BaseChecker): """Checks use of the logging module.""" __implements__ = interfaces.IAstroidChecker @@ -75,7 +77,7 @@ def visit_import(self, node): else: self._logging_name = 'logging' - @utils.check_messages(*(MSGS.keys())) + @check_messages(*(MSGS.keys())) def visit_callfunc(self, node): """Checks calls to (simple forms of) logging methods.""" if (not isinstance(node.func, astroid.Getattr) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py index dee53e62..69959090 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py @@ -19,8 +19,8 @@ import re -from ..interfaces import IRawChecker -from . 
import BaseChecker +from pylint.interfaces import IRawChecker +from pylint.checkers import BaseChecker MSGS = { diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py index 715282a9..98321954 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py @@ -17,11 +17,11 @@ """ import sys -from .. import astroid +import astroid -from ..interfaces import IAstroidChecker -from . import BaseChecker -from .utils import check_messages +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import check_messages MSGS = { 'E1001': ('Use of __slots__ on an old style class', diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py index 4d450da1..a8e4367c 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py @@ -25,12 +25,12 @@ #if not hasattr(tokenize, 'NL'): # raise ValueError("tokenize.NL doesn't exist -- tokenize module too old") -from ..logilab.common.ureports import Table +from logilab.common.ureports import Table -from ..interfaces import ITokenChecker -from ..utils import EmptyReport -from ..checkers import BaseTokenChecker -from ..reporters import diff_string +from pylint.interfaces import ITokenChecker +from pylint.utils import EmptyReport +from pylint.checkers import BaseTokenChecker +from pylint.reporters import diff_string def report_raw_stats(sect, stats, old_stats): """calculate percentage of code / doc / comment / empty diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py index f22116e9..26b37255 100644 --- 
a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py @@ -19,10 +19,10 @@ import sys from itertools import izip -from ..logilab.common.ureports import Table +from logilab.common.ureports import Table -from ..interfaces import IRawChecker -from . import BaseChecker, table_lines_from_stats +from pylint.interfaces import IRawChecker +from pylint.checkers import BaseChecker, table_lines_from_stats class Similar(object): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py index 51450983..07e1fbe1 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py @@ -18,10 +18,11 @@ import re import sys -from .. import astroid +import astroid -from ..interfaces import IAstroidChecker -from . import BaseChecker, BaseTokenChecker, utils +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker, BaseTokenChecker +from pylint.checkers import utils _VALID_OPEN_MODE_REGEX = r'^(r?U|[rwa]\+?b?)$' diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py index 3d5e8051..42563da7 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py @@ -21,10 +21,12 @@ import sys import tokenize -from .. import astroid +import astroid -from ..interfaces import ITokenChecker, IAstroidChecker -from . 
import BaseChecker, BaseTokenChecker, utils +from pylint.interfaces import ITokenChecker, IAstroidChecker +from pylint.checkers import BaseChecker, BaseTokenChecker +from pylint.checkers import utils +from pylint.checkers.utils import check_messages _PY3K = sys.version_info >= (3, 0) @@ -84,7 +86,7 @@ class StringFormatChecker(BaseChecker): name = 'string' msgs = MSGS - @utils.check_messages(*(MSGS.keys())) + @check_messages(*(MSGS.keys())) def visit_binop(self, node): if node.op != '%': return @@ -175,7 +177,7 @@ class StringMethodsChecker(BaseChecker): " duplicate character, "), } - @utils.check_messages(*(MSGS.keys())) + @check_messages(*(MSGS.keys())) def visit_callfunc(self, node): func = utils.safe_infer(node.func) if (isinstance(func, astroid.BoundMethod) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py index e8938827..69883592 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py @@ -19,12 +19,12 @@ import re import shlex -from .. import astroid -from ..astroid import InferenceError, NotFoundError, YES, Instance +import astroid +from astroid import InferenceError, NotFoundError, YES, Instance -from ..interfaces import IAstroidChecker -from . 
import BaseChecker -from .utils import safe_infer, is_super, check_messages +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import safe_infer, is_super, check_messages MSGS = { 'E1101': ('%s %r has no %r member', diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py index 1a7dca9e..72a9733d 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py @@ -21,10 +21,9 @@ import re import string -from .. import astroid - -from ..astroid import scoped_nodes -from ..logilab.common.compat import builtins +import astroid +from astroid import scoped_nodes +from logilab.common.compat import builtins BUILTINS_NAME = builtins.__name__ @@ -61,7 +60,7 @@ def clobber_in_except(node): (False, None) otherwise. """ if isinstance(node, astroid.AssAttr): - return (True, (node.attrname, 'object %r' % (node.expr.name,))) + return (True, (node.attrname, 'object %r' % (node.expr.as_string(),))) elif isinstance(node, astroid.AssName): name = node.name if is_builtin(name): @@ -163,6 +162,11 @@ def is_defined_before(var_node): if getattr(_node, 'name', None) == varname: return True break + elif isinstance(_node, astroid.ExceptHandler): + if isinstance(_node.name, astroid.AssName): + ass_node=_node.name + if ass_node.name == varname: + return True _node = _node.parent # possibly multiple statements on the same line using semi colon separator stmt = var_node.statement() @@ -395,10 +399,10 @@ def get_argument_from_call(callfunc_node, position=None, keyword=None): :raises NoSuchArgumentError: if no argument at the provided position or with the provided keyword. 
""" - if not position and not keyword: + if position is None and keyword is None: raise ValueError('Must specify at least one of: position or keyword.') try: - if position and not isinstance(callfunc_node.args[position], astroid.Keyword): + if position is not None and not isinstance(callfunc_node.args[position], astroid.Keyword): return callfunc_node.args[position] except IndexError as error: raise NoSuchArgumentError(error) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py index afe0f945..7f4ff1be 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py @@ -19,12 +19,12 @@ import sys from copy import copy -from .. import astroid -from ..astroid import are_exclusive, builtin_lookup, AstroidBuildingException +import astroid +from astroid import are_exclusive, builtin_lookup, AstroidBuildingException -from ..interfaces import IAstroidChecker -from . 
import BaseChecker -from .utils import (PYMETHODS, is_ancestor_name, is_builtin, +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import (PYMETHODS, is_ancestor_name, is_builtin, is_defined_before, is_error, is_func_default, is_func_decorator, assign_parent, check_messages, is_inside_except, clobber_in_except, get_all_elements) @@ -119,11 +119,18 @@ def overridden_method(klass, name): a list comprehension or a generator expression) is used outside \ the loop.'), - 'W0632': ('Possible unbalanced tuple unpacking: ' + 'W0632': ('Possible unbalanced tuple unpacking with ' + 'sequence at line %s: ' 'left side has %d label(s), right side has %d value(s)', 'unbalanced-tuple-unpacking', 'Used when there is an unbalanced tuple unpacking in assignment'), + 'W0633': ('Attempting to unpack a non-sequence with ' + 'non-sequence at line %s', + 'unpacking-non-sequence', + 'Used when something which is not ' + 'a sequence is used in an unpack assignment'), + } class VariablesChecker(BaseChecker): @@ -542,26 +549,65 @@ def visit_from(self, node): continue self._check_module_attrs(node, module, name.split('.')) - @check_messages('unbalanced-tuple-unpacking') + @check_messages('unbalanced-tuple-unpacking', 'unpacking-non-sequence') def visit_assign(self, node): - """Check unbalanced tuple unpacking for assignments""" + """Check unbalanced tuple unpacking for assignments + and unpacking non-sequences. 
+ """ if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)): return - try: - infered = node.value.infer().next() - except astroid.InferenceError: - return - if not isinstance(infered, (astroid.Tuple, astroid.List)): - return + targets = node.targets[0].itered() - values = infered.itered() if any(not isinstance(target_node, astroid.AssName) for target_node in targets): return - if len(targets) != len(values): - self.add_message('unbalanced-tuple-unpacking', - node=node, - args=(len(targets), len(values))) + + try: + for infered in node.value.infer(): + self._check_unpacking(infered, node, targets) + except astroid.InferenceError: + return + + def _check_unpacking(self, infered, node, targets): + """ Check for unbalanced tuple unpacking + and unpacking non sequences. + """ + if isinstance(infered, (astroid.Tuple, astroid.List)): + values = infered.itered() + if len(targets) != len(values): + if node.root().name == infered.root().name: + location = infered.lineno or 'unknown' + else: + location = '%s (%s)' % (infered.lineno or 'unknown', + infered.root().name) + + self.add_message('unbalanced-tuple-unpacking', + node=node, + args=(location, + len(targets), + len(values))) + else: + if infered is astroid.YES: + return + + for meth in ('__iter__', '__getitem__'): + try: + infered.getattr(meth) + except astroid.NotFoundError: + continue + else: + break + else: + if node.root().name == infered.root().name: + location = infered.lineno or 'unknown' + else: + location = '%s (%s)' % (infered.lineno or 'unknown', + infered.root().name) + + self.add_message('unpacking-non-sequence', + node=node, + args=(location, )) + def _check_module_attrs(self, node, module, module_names): """check that module_names (list of string) are accessible through the diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py index 192f2548..a65a1162 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py +++ 
b/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py @@ -34,12 +34,6 @@ else: PYLINT_HOME = join(USER_HOME, '.pylint.d') -if not exists(PYLINT_HOME): - try: - os.mkdir(PYLINT_HOME) - except OSError: - print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME - def get_pdata_path(base_name, recurs): """return the path of the file which should contain old search data for the given base_name with the given options values @@ -66,6 +60,11 @@ def load_results(base): def save_results(results, base): """pickle results""" + if not exists(PYLINT_HOME): + try: + os.mkdir(PYLINT_HOME) + except OSError: + print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME data_file = get_pdata_path(base, 1) try: pickle.dump(results, open(data_file, _PICK_MOD)) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py index 7193c65a..e0754ce0 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py @@ -12,7 +12,7 @@ # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
"""Interfaces for PyLint objects""" -from .logilab.common.interface import Interface +from logilab.common.interface import Interface class IChecker(Interface): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py index 25c5377b..812a953b 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py @@ -27,37 +27,37 @@ """ # import this first to avoid builtin namespace pollution -from .checkers import utils +from pylint.checkers import utils import sys import os import tokenize from warnings import warn -from .logilab.common.configuration import UnsupportedAction, OptionsManagerMixIn -from .logilab.common.optik_ext import check_csv -from .logilab.common.modutils import load_module_from_name, get_module_part -from .logilab.common.interface import implements -from .logilab.common.textutils import splitstrip -from .logilab.common.ureports import Table, Text, Section -from .logilab.common.__pkginfo__ import version as common_version +from logilab.common.configuration import UnsupportedAction, OptionsManagerMixIn +from logilab.common.optik_ext import check_csv +from logilab.common.modutils import load_module_from_name, get_module_part +from logilab.common.interface import implements +from logilab.common.textutils import splitstrip +from logilab.common.ureports import Table, Text, Section +from logilab.common.__pkginfo__ import version as common_version -from .astroid import MANAGER, nodes, AstroidBuildingException -from .astroid.__pkginfo__ import version as astroid_version +from astroid import MANAGER, nodes, AstroidBuildingException +from astroid.__pkginfo__ import version as astroid_version -from .utils import ( +from pylint.utils import ( MSG_TYPES, OPTION_RGX, PyLintASTWalker, UnknownMessage, MessagesHandlerMixIn, ReportsHandlerMixIn, EmptyReport, WarningScope, expand_modules, tokenize_module) -from .interfaces import IRawChecker, 
ITokenChecker, IAstroidChecker -from .checkers import (BaseTokenChecker, +from pylint.interfaces import IRawChecker, ITokenChecker, IAstroidChecker +from pylint.checkers import (BaseTokenChecker, table_lines_from_stats, initialize as checkers_initialize) -from .reporters import initialize as reporters_initialize -from . import config +from pylint.reporters import initialize as reporters_initialize +from pylint import config -from .__pkginfo__ import version +from pylint.__pkginfo__ import version @@ -262,7 +262,7 @@ def make_options(): 'group': 'Reports', 'help' : ('Template used to display messages. ' 'This is a python new-style format string ' - 'used to format the massage information. ' + 'used to format the message information. ' 'See doc for all details') }), # msg-template ) @@ -518,7 +518,7 @@ def collect_block_lines(self, node, msg_state): if first <= lineno <= last: # Set state for all lines for this block, if the # warning is applied to nodes. - if self._messages[msgid].scope == WarningScope.NODE: + if self.check_message_id(msgid).scope == WarningScope.NODE: if lineno > firstchildlineno: state = True first_, last_ = node.block_range(lineno) @@ -563,6 +563,22 @@ def prepare_checkers(self): checker.active_msgs = messages return neededcheckers + def should_analyze_file(self, modname, path): + """Returns whether or not a module should be checked. + + This implementation returns True for all inputs, indicating that all + files should be linted. + + Subclasses may override this method to indicate that modules satisfying + certain conditions should not be linted. + + :param str modname: The name of the module to be checked. + :param str path: The full path to the source code of the module. + :returns: True if the module should be checked. + :rtype: bool + """ + return True + def check(self, files_or_modules): """main checking entry: check a list of files or modules from their name. 
@@ -582,12 +598,14 @@ def check(self, files_or_modules): # build ast and check modules or packages for descr in self.expand_files(files_or_modules): modname, filepath = descr['name'], descr['path'] + if not self.should_analyze_file(modname, filepath): + continue if self.config.files_output: reportfile = 'pylint_%s.%s' % (modname, self.reporter.extension) self.reporter.set_output(open(reportfile, 'w')) self.set_current_module(modname, filepath) # get the module representation - astroid = self.get_astroid(filepath, modname) + astroid = self.get_ast(filepath, modname) if astroid is None: continue self.base_name = descr['basename'] @@ -639,8 +657,8 @@ def set_current_module(self, modname, filepath=None): self._raw_module_msgs_state = {} self._ignored_msgs = {} - def get_astroid(self, filepath, modname): - """return a astroid representation for a module""" + def get_ast(self, filepath, modname): + """return a ast(roid) representation for a module""" try: return MANAGER.ast_from_file(filepath, modname, source=True) except SyntaxError, ex: @@ -717,6 +735,8 @@ def close(self): # save results if persistent run if self.config.persistent: config.save_results(self.stats, self.base_name) + else: + self.reporter.on_close(self.stats, {}) # specific reports ######################################################## @@ -819,7 +839,7 @@ def report_messages_by_module_stats(sect, stats, _): # this may help to import modules using gettext # XXX syt, actually needed since we don't import code? -from .logilab.common.compat import builtins +from logilab.common.compat import builtins builtins._ = str @@ -1037,7 +1057,7 @@ def cb_generate_config(self, *args, **kwargs): def cb_generate_manpage(self, *args, **kwargs): """optik callback for sample config file generation""" - from . 
import __pkginfo__ + from pylint import __pkginfo__ self.linter.generate_manpage(__pkginfo__) sys.exit(0) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py deleted file mode 100644 index 756c5e4a..00000000 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/logilab/common/deprecation.py +++ /dev/null @@ -1,130 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Deprecation utilities.""" - -__docformat__ = "restructuredtext en" - -import sys -from warnings import warn - -class class_deprecated(type): - """metaclass to print a warning on instantiation of a deprecated class""" - - def __call__(cls, *args, **kwargs): - msg = getattr(cls, "__deprecation_warning__", - "%(cls)s is deprecated") % {'cls': cls.__name__} - warn(msg, DeprecationWarning, stacklevel=2) - return type.__call__(cls, *args, **kwargs) - - -def class_renamed(old_name, new_class, message=None): - """automatically creates a class which fires a DeprecationWarning - when instantiated. 
- - >>> Set = class_renamed('Set', set, 'Set is now replaced by set') - >>> s = Set() - sample.py:57: DeprecationWarning: Set is now replaced by set - s = Set() - >>> - """ - clsdict = {} - if message is None: - message = '%s is deprecated, use %s' % (old_name, new_class.__name__) - clsdict['__deprecation_warning__'] = message - try: - # new-style class - return class_deprecated(old_name, (new_class,), clsdict) - except (NameError, TypeError): - # old-style class - class DeprecatedClass(new_class): - """FIXME: There might be a better way to handle old/new-style class - """ - def __init__(self, *args, **kwargs): - warn(message, DeprecationWarning, stacklevel=2) - new_class.__init__(self, *args, **kwargs) - return DeprecatedClass - - -def class_moved(new_class, old_name=None, message=None): - """nice wrapper around class_renamed when a class has been moved into - another module - """ - if old_name is None: - old_name = new_class.__name__ - if message is None: - message = 'class %s is now available as %s.%s' % ( - old_name, new_class.__module__, new_class.__name__) - return class_renamed(old_name, new_class, message) - -def deprecated(reason=None, stacklevel=2, name=None, doc=None): - """Decorator that raises a DeprecationWarning to print a message - when the decorated function is called. - """ - def deprecated_decorator(func): - message = reason or 'The function "%s" is deprecated' - if '%s' in message: - message = message % func.func_name - def wrapped(*args, **kwargs): - warn(message, DeprecationWarning, stacklevel=stacklevel) - return func(*args, **kwargs) - try: - wrapped.__name__ = name or func.__name__ - except TypeError: # readonly attribute in 2.3 - pass - wrapped.__doc__ = doc or func.__doc__ - return wrapped - return deprecated_decorator - -def moved(modpath, objname): - """use to tell that a callable has been moved to a new module. 
- - It returns a callable wrapper, so that when its called a warning is printed - telling where the object can be found, import is done (and not before) and - the actual object is called. - - NOTE: the usage is somewhat limited on classes since it will fail if the - wrapper is use in a class ancestors list, use the `class_moved` function - instead (which has no lazy import feature though). - """ - def callnew(*args, **kwargs): - from .modutils import load_module_from_name - message = "object %s has been moved to module %s" % (objname, modpath) - warn(message, DeprecationWarning, stacklevel=2) - m = load_module_from_name(modpath) - return getattr(m, objname)(*args, **kwargs) - return callnew - - - -class DeprecationWrapper(object): - """proxy to print a warning on access to any attribute of the wrapped object - """ - def __init__(self, proxied, msg=None): - self._proxied = proxied - self._msg = msg - - def __getattr__(self, attr): - warn(self._msg, DeprecationWarning, stacklevel=2) - return getattr(self._proxied, attr) - - def __setattr__(self, attr, value): - if attr in ('_proxied', '_msg'): - self.__dict__[attr] = value - else: - warn(self._msg, DeprecationWarning, stacklevel=2) - setattr(self._proxied, attr, value) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py index d09a0360..53064c73 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py @@ -17,7 +17,9 @@ import locale import os -from .. 
import utils +from pylint.utils import MSG_TYPES + +from pylint import utils CMPS = ['=', '-', '+'] @@ -44,7 +46,7 @@ def __init__(self, reporter, msg_id, location, msg): self.path = self.abspath.replace(reporter.path_strip_prefix, '') self.msg = msg self.C = msg_id[0] - self.category = utils.MSG_TYPES[msg_id[0]] + self.category = MSG_TYPES[msg_id[0]] self.symbol = reporter.linter.check_message_id(msg_id).symbol def format(self, template): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py index 211bcd2b..331eb179 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/guireporter.py @@ -1,10 +1,10 @@ """ reporter used by gui.py """ -import sys -from ..logilab.common.ureports import TextWriter +import sys -from . import BaseReporter -from ..interfaces import IReporter +from pylint.interfaces import IReporter +from pylint.reporters import BaseReporter, Message +from logilab.common.ureports import TextWriter class GUIReporter(BaseReporter): @@ -16,16 +16,12 @@ class GUIReporter(BaseReporter): def __init__(self, gui, output=sys.stdout): """init""" BaseReporter.__init__(self, output) - self.msgs = [] self.gui = gui def add_message(self, msg_id, location, msg): """manage message of different type and in the context of path""" - filename, module, obj, line, col_offset = location - msg = Message(self, msg_id, location, msg) - full_msg = [msg.C, msg_id, filename, module, obj, str(line), msg] - self.msgs += [[sigle, module, obj, str(line)]] - self.gui.msg_queue.put(full_msg) + message = Message(self, msg_id, location, msg) + self.gui.msg_queue.put(message) def _display(self, layout): """launch layouts display""" diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py index 14eae036..a51e0e7b 100644 --- 
a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py @@ -12,13 +12,14 @@ # this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. """HTML reporter""" + import sys from cgi import escape -from ..logilab.common.ureports import HTMLWriter, Section, Table +from logilab.common.ureports import HTMLWriter, Section, Table -from . import BaseReporter, Message -from ..interfaces import IReporter +from pylint.interfaces import IReporter +from pylint.reporters import BaseReporter, Message class HTMLReporter(BaseReporter): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py index c5dff1e7..555efc80 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py @@ -16,14 +16,14 @@ :text: the default one grouping messages by module :colorized: an ANSI colorized text reporter """ -import warnings -from ..logilab.common.textutils import colorize_ansi -from ..logilab.common.ureports import TextWriter +import warnings -from . 
import BaseReporter, Message -from ..interfaces import IReporter +from logilab.common.ureports import TextWriter +from logilab.common.textutils import colorize_ansi +from pylint.interfaces import IReporter +from pylint.reporters import BaseReporter, Message TITLE_UNDERLINES = ['', '=', '-', '.'] diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py index 32e54bf7..05e8b41e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py @@ -20,20 +20,20 @@ import re import sys import tokenize -from warnings import warn import os +from warnings import warn from os.path import dirname, basename, splitext, exists, isdir, join, normpath -from .logilab.common.interface import implements -from .logilab.common.modutils import modpath_from_file, get_module_files, \ +from logilab.common.interface import implements +from logilab.common.modutils import modpath_from_file, get_module_files, \ file_from_modpath, load_module_from_file -from .logilab.common.textutils import normalize_text -from .logilab.common.configuration import rest_format_section -from .logilab.common.ureports import Section +from logilab.common.textutils import normalize_text +from logilab.common.configuration import rest_format_section +from logilab.common.ureports import Section -from .astroid import nodes, Module +from astroid import nodes, Module -from .interfaces import IRawChecker, ITokenChecker +from pylint.interfaces import IRawChecker, ITokenChecker class UnknownMessage(Exception): @@ -43,7 +43,6 @@ class EmptyReport(Exception): """raised when a report is empty and so should not be displayed""" - MSG_TYPES = { 'I' : 'info', 'C' : 'convention', @@ -77,17 +76,6 @@ class WarningScope(object): NODE = 'node-based-msg' -def sort_msgs(msgids): - """sort message identifiers according to their category first""" - msgs = {} - for msg in msgids: - msgs.setdefault(msg[0], 
[]).append(msg) - result = [] - for m_id in _MSG_ORDER: - if m_id in msgs: - result.extend( sorted(msgs[m_id]) ) - return result - def get_module_and_frameid(node): """return the module name and the frame id in the module""" frame = node.frame() @@ -122,18 +110,79 @@ def tokenize_module(module): return list(tokenize.generate_tokens(readline)) return list(tokenize.tokenize(readline)) +def build_message_def(checker, msgid, msg_tuple): + if implements(checker, (IRawChecker, ITokenChecker)): + default_scope = WarningScope.LINE + else: + default_scope = WarningScope.NODE + options = {} + if len(msg_tuple) > 3: + (msg, symbol, descr, options) = msg_tuple + elif len(msg_tuple) > 2: + (msg, symbol, descr) = msg_tuple[:3] + else: + # messages should have a symbol, but for backward compatibility + # they may not. + (msg, descr) = msg_tuple + warn("[pylint 0.26] description of message %s doesn't include " + "a symbolic name" % msgid, DeprecationWarning) + symbol = None + options.setdefault('scope', default_scope) + return MessageDefinition(checker, msgid, msg, descr, symbol, **options) + class MessageDefinition(object): - def __init__(self, checker, msgid, msg, descr, symbol, scope): + def __init__(self, checker, msgid, msg, descr, symbol, scope, + minversion=None, maxversion=None, old_names=None): + self.checker = checker assert len(msgid) == 5, 'Invalid message id %s' % msgid assert msgid[0] in MSG_TYPES, \ 'Bad message type %s in %r' % (msgid[0], msgid) self.msgid = msgid self.msg = msg self.descr = descr - self.checker = checker self.symbol = symbol self.scope = scope + self.minversion = minversion + self.maxversion = maxversion + self.old_names = old_names or [] + + def may_be_emitted(self): + """return True if message may be emitted using the current interpreter""" + if self.minversion is not None and self.minversion > sys.version_info: + return False + if self.maxversion is not None and self.maxversion <= sys.version_info: + return False + return True + + def 
format_help(self, checkerref=False): + """return the help string for the given message id""" + desc = self.descr + if checkerref: + desc += ' This message belongs to the %s checker.' % \ + self.checker.name + title = self.msg + if self.symbol: + msgid = '%s (%s)' % (self.symbol, self.msgid) + else: + msgid = self.msgid + if self.minversion or self.maxversion: + restr = [] + if self.minversion: + restr.append('< %s' % '.'.join([str(n) for n in self.minversion])) + if self.maxversion: + restr.append('>= %s' % '.'.join([str(n) for n in self.maxversion])) + restr = ' or '.join(restr) + if checkerref: + desc += " It can't be emitted when using Python %s." % restr + else: + desc += " This message can't be emitted when using Python %s." % restr + desc = normalize_text(' '.join(desc.split()), indent=' ') + if title != '%s': + title = title.splitlines()[0] + return ':%s: *%s*\n%s' % (msgid, title, desc) + return ':%s:\n%s' % (msgid, desc) + class MessagesHandlerMixIn(object): """a mix-in class containing all the messages related methods for the main @@ -141,10 +190,15 @@ class MessagesHandlerMixIn(object): """ def __init__(self): - # dictionary of registered messages + # Primary registry for all active messages (i.e. all messages + # that can be emitted by pylint for the underlying Python + # version). It contains the 1:1 mapping from symbolic names + # to message definition objects. self._messages = {} - # dictionary from string symbolic id to Message object. - self._messages_by_symbol = {} + # Maps alternative names (numeric IDs, deprecated names) to + # message definitions. May contain several names for each definition + # object. 
+ self._alternative_names = {} self._msgs_state = {} self._module_msgs_state = {} # None self._raw_module_msgs_state = {} @@ -153,6 +207,16 @@ def __init__(self): self._ignored_msgs = {} self._suppression_mapping = {} + def add_renamed_message(self, old_id, old_symbol, new_symbol): + """Register the old ID and symbol for a warning that was renamed. + + This allows users to keep using the old ID/symbol in suppressions. + """ + msg = self.check_message_id(new_symbol) + msg.old_names.append((old_id, old_symbol)) + self._alternative_names[old_id] = msg + self._alternative_names[old_symbol] = msg + def register_messages(self, checker): """register a dictionary of messages @@ -162,65 +226,26 @@ def register_messages(self, checker): message ids should be a string of len 4, where the two first characters are the checker id and the two last the message id in this checker """ - msgs_dict = checker.msgs chkid = None - - for msgid, msg_tuple in msgs_dict.iteritems(): - if implements(checker, (IRawChecker, ITokenChecker)): - scope = WarningScope.LINE - else: - scope = WarningScope.NODE - if len(msg_tuple) > 2: - (msg, msgsymbol, msgdescr) = msg_tuple[:3] - assert msgsymbol not in self._messages_by_symbol, \ - 'Message symbol %r is already defined' % msgsymbol - if len(msg_tuple) > 3: - if 'scope' in msg_tuple[3]: - scope = msg_tuple[3]['scope'] - if 'minversion' in msg_tuple[3]: - minversion = msg_tuple[3]['minversion'] - if minversion > sys.version_info: - self._msgs_state[msgid] = False - continue - if 'maxversion' in msg_tuple[3]: - maxversion = msg_tuple[3]['maxversion'] - if maxversion <= sys.version_info: - self._msgs_state[msgid] = False - continue - else: - # messages should have a symbol, but for backward compatibility - # they may not. 
- (msg, msgdescr) = msg_tuple - warn("[pylint 0.26] description of message %s doesn't include " - "a symbolic name" % msgid, DeprecationWarning) - msgsymbol = None + for msgid, msg_tuple in checker.msgs.iteritems(): + msg = build_message_def(checker, msgid, msg_tuple) + assert msg.symbol not in self._messages, \ + 'Message symbol %r is already defined' % msg.symbol # avoid duplicate / malformed ids - assert msgid not in self._messages, \ + assert msg.msgid not in self._alternative_names, \ 'Message id %r is already defined' % msgid - assert chkid is None or chkid == msgid[1:3], \ + assert chkid is None or chkid == msg.msgid[1:3], \ 'Inconsistent checker part in message id %r' % msgid - chkid = msgid[1:3] - msg = MessageDefinition(checker, msgid, msg, msgdescr, msgsymbol, scope) - self._messages[msgid] = msg - self._messages_by_symbol[msgsymbol] = msg - self._msgs_by_category.setdefault(msgid[0], []).append(msgid) - - def get_message_help(self, msgid, checkerref=False): - """return the help string for the given message id""" - msg = self.check_message_id(msgid) - desc = normalize_text(' '.join(msg.descr.split()), indent=' ') - if checkerref: - desc += ' This message belongs to the %s checker.' 
% \ - msg.checker.name - title = msg.msg - if msg.symbol: - symbol_part = ' (%s)' % msg.symbol - else: - symbol_part = '' - if title != '%s': - title = title.splitlines()[0] - return ':%s%s: *%s*\n%s' % (msg.msgid, symbol_part, title, desc) - return ':%s%s:\n%s' % (msg.msgid, symbol_part, desc) + chkid = msg.msgid[1:3] + if not msg.may_be_emitted(): + self._msgs_state[msg.msgid] = False + continue + self._messages[msg.symbol] = msg + self._alternative_names[msg.msgid] = msg + for old_id, old_symbol in msg.old_names: + self._alternative_names[old_id] = msg + self._alternative_names[old_symbol] = msg + self._msgs_by_category.setdefault(msg.msgid[0], []).append(msg.msgid) def disable(self, msgid, scope='package', line=None): """don't output message of the given id""" @@ -240,7 +265,7 @@ def disable(self, msgid, scope='package', line=None): if msgid.lower() in self._checkers: for checker in self._checkers[msgid.lower()]: for _msgid in checker.msgs: - if _msgid in self._messages: + if _msgid in self._alternative_names: self.disable(_msgid, scope, line) return # msgid is report id? @@ -306,13 +331,14 @@ def check_message_id(self, msgid): Raises UnknownMessage if the message id is not defined. """ - if msgid in self._messages_by_symbol: - return self._messages_by_symbol[msgid] - msgid = msgid.upper() - try: - return self._messages[msgid] - except KeyError: - raise UnknownMessage('No such message id %s' % msgid) + if msgid[1:].isdigit(): + msgid = msgid.upper() + for source in (self._alternative_names, self._messages): + try: + return source[msgid] + except KeyError: + pass + raise UnknownMessage('No such message id %s' % msgid) def get_msg_display_string(self, msgid): """Generates a user-consumable representation of a message. 
@@ -329,14 +355,19 @@ def get_message_state_scope(self, msgid, line=None): except (KeyError, TypeError): return MSG_STATE_SCOPE_CONFIG - def is_message_enabled(self, msgid, line=None): + def is_message_enabled(self, msg_descr, line=None): """return true if the message associated to the given message id is enabled msgid may be either a numeric or symbolic message id. """ - if msgid in self._messages_by_symbol: - msgid = self._messages_by_symbol[msgid].msgid + try: + msgid = self.check_message_id(msg_descr).msgid + except UnknownMessage: + # The linter checks for messages that are not registered + # due to version mismatch, just treat them as message IDs + # for now. + msgid = msg_descr if line is None: return self._msgs_state.get(msgid, True) try: @@ -396,9 +427,9 @@ def add_message(self, msg_descr, line=None, node=None, args=None): self.stats[msg_cat] += 1 self.stats['by_module'][self.current_name][msg_cat] += 1 try: - self.stats['by_msg'][msgid] += 1 + self.stats['by_msg'][msg_info.symbol] += 1 except KeyError: - self.stats['by_msg'][msgid] = 1 + self.stats['by_msg'][msg_info.symbol] = 1 # expand message ? 
msg = msg_info.msg if args: @@ -417,7 +448,7 @@ def help_message(self, msgids): """display help messages for the given message identifiers""" for msgid in msgids: try: - print self.get_message_help(msgid, True) + print self.check_message_id(msgid).format_help(checkerref=True) print except UnknownMessage, ex: print ex @@ -467,8 +498,10 @@ def print_full_documentation(self): title = ('%smessages' % prefix).capitalize() print title print '~' * len(title) - for msgid in sort_msgs(msgs.iterkeys()): - print self.get_message_help(msgid, False) + for msgid, msg in sorted(msgs.iteritems(), + key=lambda (k,v): (_MSG_ORDER.index(k[0]), k)): + msg = build_message_def(checker, msgid, msg) + print msg.format_help(checkerref=False) print if reports: title = ('%sreports' % prefix).capitalize() @@ -479,14 +512,16 @@ def print_full_documentation(self): print print + @property + def messages(self): + """The list of all active messages.""" + return self._messages.itervalues() + def list_messages(self): """output full messages list documentation in ReST format""" - msgids = [] - for msgid in self._messages: - msgids.append(msgid) - msgids.sort() - for msgid in msgids: - print self.get_message_help(msgid, False) + msgs = sorted(self._messages.itervalues(), key=lambda msg: msg.msgid) + for msg in msgs: + print msg.format_help(checkerref=False) print diff --git a/pymode/libs/pylama/main.py b/pymode/libs/pylama/main.py index 591d6df2..a52d2255 100644 --- a/pymode/libs/pylama/main.py +++ b/pymode/libs/pylama/main.py @@ -8,9 +8,6 @@ from .core import LOGGER -DEFAULT_COMPLEXITY = 10 - - def shell(args=None, error=True): """ Endpoint for console. 
diff --git a/pymode/libs/pylama/tasks.py b/pymode/libs/pylama/tasks.py index be9a34b2..4616fbf2 100644 --- a/pymode/libs/pylama/tasks.py +++ b/pymode/libs/pylama/tasks.py @@ -87,19 +87,10 @@ def check_path(path, options=None, rootpath=None, code=None, **meta): """ LOGGER.info("Parse file: %s", path) - config = dict() - if rootpath is None: - rootpath = '.' - - for mask in options.file_params: - if mask.match(path): - config.update(options.file_params[mask]) + rootpath = rootpath or '.' errors = [] - for error in run( - path, ignore=options.ignore, select=options.select, - linters=options.linters, complexity=options.complexity, - config=config, code=code, **meta): + for error in run(path, code, options): try: error['rel'] = op.relpath(error['filename'], rootpath) error['col'] = error.get('col', 1) diff --git a/pymode/libs/pylama/utils.py b/pymode/libs/pylama/utils.py deleted file mode 100644 index b754248d..00000000 --- a/pymode/libs/pylama/utils.py +++ /dev/null @@ -1,184 +0,0 @@ -""" Interfaces for code checking. """ -from __future__ import absolute_import, with_statement - -import _ast -from os import path as op, environ - -from .checkers.pep8 import BaseReport, StyleGuide - - -__all__ = 'pep8', 'pep257', 'mccabe', 'pyflakes', 'pylint', 'gjslint' - -PYLINT_RC = op.abspath(op.join(op.dirname(__file__), 'pylint.rc')) - - -class _PEP8Report(BaseReport): - - def __init__(self, *args, **kwargs): - super(_PEP8Report, self).__init__(*args, **kwargs) - self.errors = [] - - def init_file(self, filename, lines, expected, line_offset): - """ Prepare storage for errors. """ - super(_PEP8Report, self).init_file( - filename, lines, expected, line_offset) - self.errors = [] - - def error(self, line_number, offset, text, check): - """ Save errors. """ - code = super(_PEP8Report, self).error( - line_number, offset, text, check) - - self.errors.append(dict( - text=text, - type=code, - col=offset + 1, - lnum=line_number, - )) - - def get_file_results(self): - """ Get errors. 
- - :return list: List of errors. - - """ - return self.errors - - -def pep8(path, **meta): - """ PEP8 code checking. - - :return list: List of errors. - - """ - P8Style = StyleGuide(reporter=_PEP8Report) - return P8Style.input_file(path) - - -def mccabe(path, code=None, complexity=8, **meta): - """ MCCabe code checking. - - :return list: List of errors. - - """ - from .checkers.mccabe import get_code_complexity - - return get_code_complexity(code, complexity, filename=path) or [] - - -def pyflakes(path, code=None, **meta): - """ Pyflake code checking. - - :return list: List of errors. - - """ - from .checkers.pyflakes import checker - - errors = [] - tree = compile(code, path, "exec", _ast.PyCF_ONLY_AST) - w = checker.Checker(tree, path) - w.messages = sorted(w.messages, key=lambda m: m.lineno) - for w in w.messages: - errors.append(dict( - lnum=w.lineno, - text=w.message % w.message_args, - )) - return errors - - -def pylint(path, **meta): - """ Pylint code checking. - - :return list: List of errors. 
- - """ - from sys import version_info - if version_info > (3, 0): - import logging - logging.warn("Pylint don't supported python3 and will be disabled.") - return [] - - from .checkers.pylint.lint import Run - from .checkers.pylint.reporters import BaseReporter - from .checkers.pylint.astroid import MANAGER - - MANAGER.astroid_cache.clear() - - class Reporter(BaseReporter): - - def __init__(self): - self.errors = [] - BaseReporter.__init__(self) - - def _display(self, layout): - pass - - def add_message(self, msg_id, location, msg): - _, _, line, col = location[1:] - self.errors.append(dict( - lnum=line, - col=col, - text="%s %s" % (msg_id, msg), - type=msg_id[0] - )) - - pylintrc = op.join(environ.get('HOME', ''), '.pylintrc') - defattrs = '-r n' - if not op.exists(pylintrc): - defattrs += ' --rcfile={0}'.format(PYLINT_RC) - attrs = meta.get('pylint', defattrs.split()) - - runner = Run( - [path] + attrs, reporter=Reporter(), exit=False) - return runner.linter.reporter.errors - - -def pep257(path, **meta): - """ PEP257 code checking. - - :return list: List of errors. - - """ - f = open(path) - from .checkers.pep257 import check_source - - errors = [] - for er in check_source(f.read(), path): - errors.append(dict( - lnum=er.line, - col=er.char, - text='C0110 %s' % er.explanation.split('\n')[0].strip(), - type='W', - )) - return errors - - -def gjslint(path, code=None, **meta): - """ gjslint code checking. - - :return list: List of errors. 
- - """ - from .checkers.closure_linter import gjslint as lint - - errors = [] - records_iter = lint.main(["", path]) - - import re - regExErrStr = re.compile(r'Line\s(\d+),\s(E:\d+):\s(.*)') - for record in records_iter: - matchErrStr = re.match(regExErrStr, record.error_string) - if matchErrStr: - errors.append( - dict( - type=matchErrStr.group(2), - lnum=matchErrStr.group(1), - # due to errors filtering type is combined with the - # text - text=" ".join([matchErrStr.group( - 2), matchErrStr.group(3)]) - )) - - return errors - -# pymode:lint_ignore=W0231 diff --git a/pymode/lint.py b/pymode/lint.py index eaf62470..55a633d2 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -40,7 +40,8 @@ def code_check(): LOGGER.setLevel(logging.DEBUG) with silence_stderr(): - errors = check_path(path, options=options, code='\n'.join(env.curbuf)) + errors = check_path(path, options=options, + code='\n'.join(env.curbuf) + '\n') env.debug("Find errors: ", len(errors)) sort_rules = env.var('g:pymode_lint_sort') From 3e7aec072d98d3576a1af46df536a0c2f50315ab Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 7 Dec 2013 23:38:22 +0700 Subject: [PATCH 057/428] Respect &fillchars. 
--- Changelog.rst | 2 ++ autoload/pymode/folding.vim | 6 +++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/Changelog.rst b/Changelog.rst index 0c405c44..c3fa0ad3 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,6 +1,8 @@ Changelog ========= + * Get fold's expression symbol from &fillchars + ## 2013-12-04 0.7.8b -------------------- * Update indentation support; diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 9865f5ed..7718e75d 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -7,6 +7,10 @@ let s:decorator_regex = '^\s*@' let s:doc_begin_regex = '^\s*\%("""\|''''''\)' let s:doc_end_regex = '\%("""\|''''''\)\s*$' let s:doc_line_regex = '^\s*\("""\|''''''\).\+\1\s*$' +let s:symbol = ' ' +if stridx(&fillchars, 'fold') > -1 + let s:symbol = strpart(&fillchars, stridx(&fillchars, 'fold') + 5, 1) +endif fun! pymode#folding#text() " {{{ @@ -27,7 +31,7 @@ fun! pymode#folding#text() " {{{ let line = strpart(line, 0, windowwidth - 2 -len(foldedlinecount)) let line = substitute(line, '\%("""\|''''''\)', '', '') let fillcharcount = windowwidth - len(line) - len(foldedlinecount) - return line . '…' . repeat(" ", fillcharcount) . ' ' . foldedlinecount . ' ' + return line . '…' . repeat(s:symbol, fillcharcount) . ' ' . foldedlinecount . 
' ' endfunction "}}} From c65773cf5e5c6cbb7b194d45be7d31137a6b7ddd Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sun, 8 Dec 2013 05:32:32 +0700 Subject: [PATCH 058/428] Update pylama --- pymode/libs/pylama/core.py | 6 ++- pymode/libs/pylama/lint/extensions.py | 7 ++- .../pylama/lint/pylama_pyflakes/__init__.py | 4 +- .../pylama/lint/pylama_pylint/__init__.py | 4 +- pymode/libs/pylama/lint/pylama_pylint/main.py | 1 - pymode/lint.py | 45 +++++++++---------- 6 files changed, 37 insertions(+), 30 deletions(-) diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 93890bc8..7c5c8c4e 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -53,6 +53,7 @@ def run(path, code=None, options=None): item = (item, LINTERS.get(item)) name, linter = item + LOGGER.debug("Run %s", name) if not linter or not linter.allow(path): continue @@ -70,16 +71,19 @@ def run(path, code=None, options=None): errors.append(e) except IOError as e: + LOGGER.debug("IOError %s", e) errors.append(dict( lnum=0, type='E', col=0, text=str(e), filename=path or '')) except SyntaxError as e: + LOGGER.debug("SyntaxError %s", e) errors.append(dict( lnum=e.lineno or 0, type='E', col=e.offset or 0, text=e.args[0] + ' [%s]' % name, filename=path or '' )) - except Exception: + except Exception as e: + LOGGER.debug("Unknown exception %s", e) import traceback logging.debug(traceback.format_exc()) diff --git a/pymode/libs/pylama/lint/extensions.py b/pymode/libs/pylama/lint/extensions.py index d5e92055..cdf344be 100644 --- a/pymode/libs/pylama/lint/extensions.py +++ b/pymode/libs/pylama/lint/extensions.py @@ -15,8 +15,11 @@ for p in listdir(CURDIR): if p.startswith(PREFIX) and op.isdir(op.join(CURDIR, p)): name = p[len(PREFIX):] - module = import_module('.lint.%s%s' % (PREFIX, name), 'pylama') - LINTERS[name] = getattr(module, 'Linter')() + try: + module = import_module('.lint.%s%s' % (PREFIX, name), 'pylama') + LINTERS[name] = getattr(module, 'Linter')() + except 
ImportError: + continue try: from pkg_resources import iter_entry_points diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py index d2f95dee..4e0fd97a 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py @@ -8,15 +8,15 @@ class Linter(BaseLinter): """ Pyflakes code check. """ @staticmethod - def run(path, code=None, builtins=None, **meta): + def run(path, code=None, builtins="", **meta): """ Pyflake code checking. :return list: List of errors. """ import _ast - from .pyflakes import checker import os + from .pyflakes import checker os.environ.setdefault('PYFLAKES_BUILTINS', builtins) diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py index 56bdb3ae..9d28da36 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -12,9 +12,11 @@ import os.path import sys +if sys.version_info >= (3, 0, 0): + raise ImportError("pylama_pylint doesnt support python3") + CURDIR = os.path.abspath(os.path.dirname(__file__)) sys.path.insert(0, CURDIR) from .main import Linter assert Linter - diff --git a/pymode/libs/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py index 20bebf7c..8dcd8df7 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/main.py +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py @@ -52,4 +52,3 @@ def add_message(self, msg_id, location, msg): runner = Run( [path] + attrs, reporter=Reporter(), exit=False) return runner.linter.reporter.errors - diff --git a/pymode/lint.py b/pymode/lint.py index 55a633d2..854458dc 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -13,35 +13,34 @@ def code_check(): """ - from pylama.main import parse_options - from pylama.tasks import check_path + with silence_stderr(): - if not env.curbuf.name: - env.stop() - return False + from pylama.main 
import parse_options + from pylama.tasks import check_path - options = parse_options( - ignore=env.var('g:pymode_lint_ignore'), - select=env.var('g:pymode_lint_select'), - linters=env.var('g:pymode_lint_checkers'), - ) + if not env.curbuf.name: + return env.stop() - path = os.path.relpath(env.curbuf.name, env.curdir) - env.debug("Start code check: ", path) + options = parse_options( + ignore=env.var('g:pymode_lint_ignore'), + select=env.var('g:pymode_lint_select'), + linters=env.var('g:pymode_lint_checkers'), + ) - if getattr(options, 'skip', None) and any(p.match(path) for p in options.skip): # noqa - env.message('Skip code checking.') - env.debug("Skipped") - env.stop() - return False + path = os.path.relpath(env.curbuf.name, env.curdir) + env.debug("Start code check: ", path) - if env.options.get('debug'): - from pylama.core import LOGGER, logging - LOGGER.setLevel(logging.DEBUG) + if getattr(options, 'skip', None) and any(p.match(path) for p in options.skip): # noqa + env.message('Skip code checking.') + env.debug("Skipped") + return env.stop() - with silence_stderr(): - errors = check_path(path, options=options, - code='\n'.join(env.curbuf) + '\n') + if env.options.get('debug'): + from pylama.core import LOGGER, logging + LOGGER.setLevel(logging.DEBUG) + + errors = check_path( + path, options=options, code='\n'.join(env.curbuf) + '\n') env.debug("Find errors: ", len(errors)) sort_rules = env.var('g:pymode_lint_sort') From e5af8e9faa5b94b085c2ca671337afad0de0bcc6 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 9 Dec 2013 22:23:20 +0700 Subject: [PATCH 059/428] Update pylama --- .../pylama/lint/pylama_pylint/__init__.py | 2 +- .../lint/pylama_pylint/astroid/__init__.py | 20 +- .../lint/pylama_pylint/astroid/brain/py2gi.py | 147 ------------ .../astroid/brain/py2mechanize.py | 20 -- .../pylama_pylint/astroid/brain/py2qt4.py | 25 -- .../pylama_pylint/astroid/brain/py2stdlib.py | 227 ------------------ 6 files changed, 11 insertions(+), 430 deletions(-) 
delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2mechanize.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2qt4.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2stdlib.py diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py index 9d28da36..a65598db 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -4,7 +4,7 @@ # ================== -__version__ = '0.1.3' +__version__ = '0.1.5' __project__ = 'pylama_pylint' __author__ = "horneds " __license__ = "BSD" diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py index 19c80902..af602765 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py @@ -106,13 +106,13 @@ def transform(node, infer_function=infer_function): return transform # load brain plugins -from os import listdir -from os.path import join, dirname -BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') -if BRAIN_MODULES_DIR not in sys.path: - # add it to the end of the list so user path take precedence - sys.path.append(BRAIN_MODULES_DIR) -# load modules in this directory -for module in listdir(BRAIN_MODULES_DIR): - if module.endswith('.py'): - __import__(module[:-3]) +# from os import listdir +# from os.path import join, dirname +# BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') +# if BRAIN_MODULES_DIR not in sys.path: + # # add it to the end of the list so user path take precedence + # sys.path.append(BRAIN_MODULES_DIR) +# # load modules in this directory +# for module in listdir(BRAIN_MODULES_DIR): + # if module.endswith('.py'): + # __import__(module[:-3]) diff --git 
a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py deleted file mode 100644 index 5001b7cb..00000000 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Astroid hooks for the Python 2 GObject introspection bindings. - -Helps with understanding everything imported from 'gi.repository' -""" - -import inspect -import sys - -from astroid import MANAGER, AstroidBuildingException -from astroid.builder import AstroidBuilder - - -_inspected_modules = {} - - -def _gi_build_stub(parent): - """ - Inspect the passed module recursively and build stubs for functions, - classes, etc. - """ - classes = {} - functions = {} - constants = {} - methods = {} - for name in dir(parent): - if not name or name.startswith("__"): - # GLib.IConv has a parameter named "" :/ - continue - try: - obj = getattr(parent, name) - except: - continue - - if inspect.isclass(obj): - classes[name] = obj - elif (inspect.isfunction(obj) or - inspect.isbuiltin(obj)): - functions[name] = obj - elif (inspect.ismethod(obj) or - inspect.ismethoddescriptor(obj)): - methods[name] = obj - elif type(obj) in [int, str]: - constants[name] = obj - elif (str(obj).startswith(" Date: Tue, 10 Dec 2013 16:45:32 -0300 Subject: [PATCH 060/428] Real paths work allways, even when cwd != project root --- pymode/rope.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/rope.py b/pymode/rope.py index 146a7fdf..704151a0 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -151,7 +151,7 @@ def goto(): return env.goto_file( - found_resource.path, cmd=ctx.options.get('goto_definition_cmd')) + found_resource.real_path, cmd=ctx.options.get('goto_definition_cmd')) env.goto_line(line) From 09c3e4585d3b3c1735c54668b40a7ec5d229f298 Mon Sep 17 00:00:00 2001 From: Daniel Hahler Date: Thu, 12 Dec 2013 18:37:34 +0100 Subject: [PATCH 061/428] Fix typo: s/Wich/Which --- pymode/rope.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/rope.py b/pymode/rope.py index 704151a0..7d50c7d4 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -327,7 +327,7 @@ def autoimport(): _insert_import(word, modules[0], ctx) else: - module = env.user_input_choices('Wich module to import:', *modules) + module = env.user_input_choices('Which module to import:', *modules) _insert_import(word, module, ctx) return True From 8a10b9669fcb94532bdcebf69fb75bc1913b3c37 Mon Sep 17 00:00:00 2001 From: Daniel Hahler Date: Thu, 12 Dec 2013 18:40:13 +0100 Subject: [PATCH 062/428] folding: handle multibyte chars in &fillchars This uses `matchstr`, which is multibyte aware, while `strpart` is not. --- autoload/pymode/folding.vim | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 7718e75d..9bfa78d2 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -7,9 +7,9 @@ let s:decorator_regex = '^\s*@' let s:doc_begin_regex = '^\s*\%("""\|''''''\)' let s:doc_end_regex = '\%("""\|''''''\)\s*$' let s:doc_line_regex = '^\s*\("""\|''''''\).\+\1\s*$' -let s:symbol = ' ' -if stridx(&fillchars, 'fold') > -1 - let s:symbol = strpart(&fillchars, stridx(&fillchars, 'fold') + 5, 1) +let s:symbol = matchstr(&fillchars, 'fold:\zs.') " handles multibyte characters +if s:symbol == '' + let s:symbol = ' ' endif From 88f2e67adaf16ba204b665dfefa2ba6e25d5869a Mon Sep 17 00:00:00 2001 From: Wouter Overmeire Date: Wed, 18 Dec 2013 13:41:15 +0100 Subject: [PATCH 063/428] fix doc typo --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 3fa513ce..4c12e5d5 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -112,7 +112,7 @@ python buffers: > Setup pymode |quickfix| window - *'g:pymode_quickfix_maxheght'* *'g:pymode_quickfix_minheight'* + *'g:pymode_quickfix_maxheight'* *'g:pymode_quickfix_minheight'* > let 
g:pymode_quickfix_minheight = 3 let g:pymode_quickfix_maxheight = 6 From 5db6579eab1ade8240a8cdb8b5a96cc879d53ce5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9gis=20Behmo?= Date: Fri, 10 Jan 2014 14:37:33 +0100 Subject: [PATCH 064/428] Documentation is wrong on default values By default, variables pymode_lint_on_fly and pymode_lint_unmodified are set to 0. --- doc/pymode.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 4c12e5d5..9c4987d5 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -278,11 +278,11 @@ Check code on every save (if file has been modified) *'g:pymode_lint_on_write'* Check code on every save (every) *'g:pymode_lint_unmodified'* > - let g:pymode_lint_unmodified = 1 + let g:pymode_lint_unmodified = 0 Check code when editting (onfly) *'g:pymode_lint_on_fly'* > - let g:pymode_lint_on_fly = 1 + let g:pymode_lint_on_fly = 0 Show error message if cursor placed at the error line *'g:pymode_lint_message'* > From 4f19336c28cc3b03e36191ef820cb360c59359c8 Mon Sep 17 00:00:00 2001 From: Daniele Esposti Date: Tue, 21 Jan 2014 23:33:07 +0000 Subject: [PATCH 065/428] Fixed error when setting g:pymode_breakpoint_cmdin the vimrc file --- autoload/pymode/breakpoint.vim | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index f932117c..18e1a95b 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -11,6 +11,8 @@ fun! pymode#breakpoint#init() "{{{ return endif + endif + PymodePython << EOF from imp import find_module @@ -25,8 +27,6 @@ for module in ('pudb', 'ipdb'): EOF - endif - endfunction "}}} fun! 
pymode#breakpoint#operate(lnum) "{{{ From 161efbd6ed0de2ef3964c351ec3df837a22c370e Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Thu, 30 Jan 2014 11:50:59 +0100 Subject: [PATCH 066/428] Corrected small typo --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 0d364b25..497a8c45 100644 --- a/README.rst +++ b/README.rst @@ -13,7 +13,7 @@ by utilizing libraries including pylint_, rope_, pydoc_, pyflakes_, pep8_, and mccabe_ for features like static analysis, refactoring, folding, completion, documentation, and more. -The plugin containts all you need to develop python applications in Vim. +The plugin contains all you need to develop python applications in Vim. There is no need to install pylint_, rope_ or any other Python libraries on your system. From d83459ddd9ec1631fee85576892ae3ff23edf69a Mon Sep 17 00:00:00 2001 From: Mike Perrone Date: Thu, 6 Feb 2014 01:31:03 -0500 Subject: [PATCH 067/428] fix minor typo with rename module binding --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 4c12e5d5..225ff9eb 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -468,7 +468,7 @@ Rename a current module/package ~ Keymap for rename current module *'g:pymode_rope_rename_module_bind'* > - let g:pymode_rope_rename_bind = 'rr' + let g:pymode_rope_rename_module_bind = 'r1r' Imports ~ From 7e972de77a7a417489d66124435c4b918f1db264 Mon Sep 17 00:00:00 2001 From: Mike Perrone Date: Thu, 6 Feb 2014 10:07:16 -0500 Subject: [PATCH 068/428] choosen -> chosen --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index a6e26a93..11cf47d3 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -292,7 +292,7 @@ Default code checkers (you could set several) *'g:pymode_lint_checkers'* > let g:pymode_lint_checkers = ['pyflakes', 'pep8', 'mccabe'] -Values may be choosen from: `pylint`, `pep8`, 
`mccabe`, `pep257`, `pyflakes`. +Values may be chosen from: `pylint`, `pep8`, `mccabe`, `pep257`, `pyflakes`. Skip errors and warnings *'g:pymode_lint_ignore'* E.g. "E501,W002", "E2,W" (Skip all Warnings and Errors startswith E2) and etc From 5a7594f6e218dda3e13b123d406d1c2ae94d979b Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sun, 9 Feb 2014 00:50:01 +0700 Subject: [PATCH 069/428] Update changelog --- Changelog.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Changelog.rst b/Changelog.rst index c3fa0ad3..dcbca1f9 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,9 @@ Changelog ========= - * Get fold's expression symbol from &fillchars + * Get fold's expression symbol from &fillchars; + * Fixed error when setting g:pymode_breakpoint_cmd (expobrain); + ## 2013-12-04 0.7.8b -------------------- From fd6ebde4a61cdc0267fbfbe5fa82b60684246ef7 Mon Sep 17 00:00:00 2001 From: Brad Mease Date: Sun, 23 Feb 2014 21:25:16 -0500 Subject: [PATCH 070/428] Fix spelling and grammatical errors in documentation --- doc/pymode.txt | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 89ba81a3..ebbfc849 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -43,7 +43,7 @@ refactoring, and some other useful things. This plugin allows you to create python code in vim very easily. There is no need to install the pylint or rope libraries on your system. -Python-mode containts all you need to develop python applications in Vim. +Python-mode contains all you need to develop python applications in Vim. Features: *pymode-features* @@ -70,7 +70,7 @@ Features: *pymode-features* 2. Common functionality ~ *pymode-common* -This script provides the following options that can customise the behaviour of +This script provides the following options that can customizes the behavior of PythonMode. These options should be set in your |vimrc|. Bellow shows the default values. 
@@ -80,7 +80,7 @@ Turn on the whole plugin *'g:pymode'* > let g:pymode = 1 -Trun off plugin's warnings *'g:pymode_warnings'* +Turn off plugin's warnings *'g:pymode_warnings'* > let g:pymode_warnings = 1 @@ -89,7 +89,7 @@ Value is list of path's strings. > let g:pymode_paths = [] -Trim unused whitespaces on save *'g:pymode_trim_whitespaces'* +Trim unused white spaces on save *'g:pymode_trim_whitespaces'* > let g:pymode_trim_whitespaces = 1 @@ -280,7 +280,7 @@ Check code on every save (every) *'g:pymode_lint_unmodified'* > let g:pymode_lint_unmodified = 0 -Check code when editting (onfly) *'g:pymode_lint_on_fly'* +Check code when editing (on the fly) *'g:pymode_lint_on_fly'* > let g:pymode_lint_on_fly = 0 @@ -295,7 +295,7 @@ Default code checkers (you could set several) *'g:pymode_lint_checkers'* Values may be chosen from: `pylint`, `pep8`, `mccabe`, `pep257`, `pyflakes`. Skip errors and warnings *'g:pymode_lint_ignore'* -E.g. "E501,W002", "E2,W" (Skip all Warnings and Errors startswith E2) and etc +E.g. "E501,W002", "E2,W" (Skip all Warnings and Errors that starts with E2) and etc > let g:pymode_lint_ignore = "E501,W" @@ -306,7 +306,7 @@ By example you disable all warnings starting from 'W', but want to see warning let g:pymode_lint_select = "E501,W0011,W430" Sort errors by relevance *'g:pymode_lint_sort'* -If not emply, errors will be sort by defined relevance +If not empty, errors will be sort by defined relevance E.g. let g:pymode_lint_sort = ['E', 'C', 'I'] " Errors first 'E', after them 'C' and ... > @@ -442,7 +442,7 @@ Leave empty for disable key binding. *'g:pymode_rope_goto_definition_bind'* > let g:pymode_rope_goto_definition_bind = 'g' -Command for open window when definition has been finded +Command for open window when definition has been found Values are (`e`, `new`, `vnew`) *'g:pymode_rope_goto_definition_cmd'* > let g:pymode_rope_goto_definition_cmd = 'new' @@ -559,7 +559,7 @@ docstrings. Consider disabling this on slower hardware. 
> let g:pymode_syntax_slow_sync = 1 -Enable all python highlightings *'g:pymode_syntax_all'* +Enable all python highlights *'g:pymode_syntax_all'* > let g:pymode_syntax_all = 1 @@ -651,7 +651,7 @@ Pylint check is very slow ------------------------- In some projects pylint may check slowly, because it also scans imported -modules if possible. Try using another code checkiers: see +modules if possible. Try using another code checker: see |'g:pymode_lint_checkers'|. You may set |exrc| and |secure| in your |vimrc| to auto-set custom settings From 443edd9b291b7255ef0c841326c983c468e4b0ea Mon Sep 17 00:00:00 2001 From: Brad Mease Date: Sun, 23 Feb 2014 21:26:09 -0500 Subject: [PATCH 071/428] Fix misspelled configuration variable in documentation --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index ebbfc849..f37e45d2 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -587,7 +587,7 @@ Highlight space's errors *'g:pymode_syntax_space_errors'* > let g:pymode_syntax_space_errors = g:pymode_syntax_all -Highlight string formatting *'g:pymode_syntax_string_formating'* +Highlight string formatting *'g:pymode_syntax_string_formatting'* *'g:pymode_syntax_string_format'* *'g:pymode_syntax_string_templates'* *'g:pymode_syntax_doctests'* From 1f1ae3e4a631a42c366610ae607f708bacd9183f Mon Sep 17 00:00:00 2001 From: "Wu, Fan" Date: Tue, 25 Feb 2014 04:25:47 -0500 Subject: [PATCH 072/428] fix typo 'trun on' --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 89ba81a3..958f0b8f 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -80,7 +80,7 @@ Turn on the whole plugin *'g:pymode'* > let g:pymode = 1 -Trun off plugin's warnings *'g:pymode_warnings'* +Turn off plugin's warnings *'g:pymode_warnings'* > let g:pymode_warnings = 1 From 37455d73388e22b6004728064951250544123651 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 26 Feb 2014 16:04:18 
+0700 Subject: [PATCH 073/428] Add shortcut to tests run --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f343ec77..a4370605 100644 --- a/Makefile +++ b/Makefile @@ -13,11 +13,12 @@ clean: travis: rake test -.PHONY: test +.PHONY: test t test: bundle install rm -rf $(CURDIR)/.ropeproject rake test +t: test .PHONY: pylama pylama: From 2fa044a2f2dffed2ea19a118ccc4c92c0d816276 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 26 Feb 2014 16:04:56 +0700 Subject: [PATCH 074/428] Minor fixes. --- autoload/pymode/doc.vim | 4 ---- 1 file changed, 4 deletions(-) diff --git a/autoload/pymode/doc.vim b/autoload/pymode/doc.vim index 3030dcea..d29d5e9e 100644 --- a/autoload/pymode/doc.vim +++ b/autoload/pymode/doc.vim @@ -2,7 +2,6 @@ " PymodePython import pymode - fun! pymode#doc#find() "{{{ " Extract the 'word' at the cursor, expanding leftwards across identifiers " and the . operator, and rightwards across the identifier only. @@ -19,8 +18,6 @@ fun! pymode#doc#find() "{{{ call pymode#doc#show(word) endfunction "}}} - - fun! pymode#doc#show(word) "{{{ if a:word == '' call pymode#error("No name/symbol under cursor!") @@ -35,4 +32,3 @@ fun! pymode#doc#show(word) "{{{ wincmd p endfunction "}}} - From 0c6233a8bf4083b05f1fb68e03fa5fb456c82c2b Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 26 Feb 2014 16:05:36 +0700 Subject: [PATCH 075/428] Fix lint tests. 
--- t/lint.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/t/lint.vim b/t/lint.vim index 14de6e54..29463a82 100644 --- a/t/lint.vim +++ b/t/lint.vim @@ -14,7 +14,7 @@ describe 'pymode check code' Expect g:pymode_lint == 1 end - it 'lint code' + it 'lint new' put =['# coding: utf-8', 'call_unknown_function()'] PymodeLint Expect getloclist(0) == [] From 43ad3fde79624ef8b788342a222a87fc92701dae Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 26 Feb 2014 16:12:56 +0700 Subject: [PATCH 076/428] Update AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 39ab76ed..92cdf73a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -11,6 +11,7 @@ Contributors: * Ashley Hewson (ashleyh); * Benjamin Ruston (bruston); * Boris Filippov (frenzykryger); +* Brad Mease (bmease) * Daniel Hahler (blueyed) * David Vogt (winged); * Denis Kasak (dkasak); From b92c9828e21458347c1b37685ba0d16423652ef2 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 12 Mar 2014 02:28:03 +0700 Subject: [PATCH 077/428] Fix code running. Close #350, #354, #365 --- Changelog.rst | 1 + autoload/pymode/tools/loclist.vim | 2 +- pymode/run.py | 19 +++++++++++++++---- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index dcbca1f9..8d102795 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -3,6 +3,7 @@ Changelog * Get fold's expression symbol from &fillchars; * Fixed error when setting g:pymode_breakpoint_cmd (expobrain); + * Fixed code running; ## 2013-12-04 0.7.8b diff --git a/autoload/pymode/tools/loclist.vim b/autoload/pymode/tools/loclist.vim index 7daaea7a..4e960fe8 100644 --- a/autoload/pymode/tools/loclist.vim +++ b/autoload/pymode/tools/loclist.vim @@ -70,7 +70,7 @@ fun! g:PymodeLocList.show() "{{{ lclose else let num = winnr() - execute "lopen " . g:pymode_quickfix_maxheight + lopen execute max([min([line("$"), g:pymode_quickfix_maxheight]), g:pymode_quickfix_minheight]) . 
"wincmd _" if num != winnr() call setwinvar(winnr(), 'quickfix_title', self._title . ' <' . self._name . '>') diff --git a/pymode/run.py b/pymode/run.py index 4a39f530..82d30b57 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -8,14 +8,25 @@ import sys from .environment import env +from re import compile as re + + +encoding = re(r'#[^\w]+coding:\s+utf.*$') def run_code(): - """ Run python code in current buffer. """ + """ Run python code in current buffer. + + :returns: None + + """ errors, err = [], '' line1, line2 = env.var('a:line1'), env.var('a:line2') lines = __prepare_lines(line1, line2) + for ix in (0, 1): + if encoding.match(lines[ix]): + lines.pop(ix) context = dict( __name__='__main__', input=env.user_input, raw_input=env.user_input) @@ -42,14 +53,14 @@ def run_code(): else: err = sys.stderr.getvalue() - output = sys.stdout.getvalue().strip() - output = env.prepare_value(output) + output = sys.stdout.getvalue() + output = env.prepare_value(output, dumps=False) sys.stdout, sys.stderr = stdout_, stderr_ errors += [er for er in err.splitlines() if er and "" not in er] env.let('l:traceback', errors[2:]) - env.let('l:output', [s for s in output.split('\n') if s]) + env.let('l:output', [s for s in output.split('\n')]) def __prepare_lines(line1, line2): From ad90cfa1bc04d9dffa07f18cd16f1a26dad8cf31 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 13 Mar 2014 22:43:18 +0700 Subject: [PATCH 078/428] Fix python detection --- plugin/pymode.vim | 34 +++++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/plugin/pymode.vim b/plugin/pymode.vim index d7646c9d..e84cd651 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -19,7 +19,7 @@ filetype plugin on " OPTIONS: {{{ " Vim Python interpreter. Set to 'disable' for remove python features. 
-call pymode#default('g:pymode_python', 'python') +call pymode#default('g:pymode_python', '') " Disable pymode warnings call pymode#default('g:pymode_warning', 1) @@ -247,26 +247,46 @@ endif " Disable python-related functionality " let g:pymode_python = 'disable' " let g:pymode_python = 'python3' -if g:pymode_python != 'disable' && (g:pymode_python == 'python3' || !has('python') && has('python3')) - let g:pymode_python = 'python3' - command! -nargs=1 PymodePython python3 -elseif g:pymode_python != 'disable' && has('python') - let g:pymode_python = 'python' +" UltiSnips Fixes +if !len(g:pymode_python) + if exists('g:_uspy') && g:_uspy == ':py' + let g:pymode_python = 'python' + elseif exists('g:_uspy') && g:_uspy == ':py3' + let g:pymode_python = 'python3' + elseif has("python") + let g:pymode_python = 'python' + elseif has("python3") + let g:pymode_python = 'python3' + else + let g:pymode_python = 'disable' + endif +endif + +if g:pymode_python == 'python' + command! -nargs=1 PymodePython python + let g:UltiSnipsUsePythonVersion = 2 + +elseif g:pymode_python == 'python3' + + command! -nargs=1 PymodePython python3 + let g:UltiSnipsUsePythonVersion = 3 else let g:pymode_doc = 0 let g:pymode_lint = 0 let g:pymode_path = 0 - let g:pymode_python = 'disable' let g:pymode_rope = 0 let g:pymode_run = 0 let g:pymode_virtualenv = 0 + command! -nargs=1 PymodePython echo + endif + command! PymodeVersion echomsg "Pymode version: " . g:pymode_version . " interpreter: " . g:pymode_python . " lint: " . g:pymode_lint . " rope: " . 
g:pymode_rope augroup pymode From b1d1468499ffd42d19bcffcbacfe1ada460db794 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 13 Mar 2014 23:00:29 +0700 Subject: [PATCH 079/428] Update autopep8 --- pymode/__init__.py | 7 +- pymode/autopep8.py | 2099 ++++++++++++++++++++++++++++++++------------ 2 files changed, 1524 insertions(+), 582 deletions(-) diff --git a/pymode/__init__.py b/pymode/__init__.py index e647147b..67578eb9 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -12,11 +12,13 @@ def auto(): from .autopep8 import fix_file class Options(object): - aggressive = 0 - line_range = None + aggressive = 2 diff = False + experimental = True ignore = vim.eval('g:pymode_lint_ignore') in_place = True + indent_size = int(vim.eval('&tabstop')) + line_range = None max_line_length = 79 pep8_passes = 100 recursive = False @@ -35,4 +37,3 @@ def get_documentation(): help(vim.eval('a:word')) sys.stdout, out = _, sys.stdout.getvalue() vim.current.buffer.append(str(out).splitlines(), 0) - diff --git a/pymode/autopep8.py b/pymode/autopep8.py index 3f400ddd..5f3ccf0b 100644 --- a/pymode/autopep8.py +++ b/pymode/autopep8.py @@ -2,6 +2,7 @@ # # Copyright (C) 2010-2011 Hideo Hattori # Copyright (C) 2011-2013 Hideo Hattori, Steven Myint +# Copyright (C) 2013-2014 Hideo Hattori, Steven Myint, Bill Wendling # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -40,20 +41,20 @@ from __future__ import unicode_literals import codecs +import collections import copy import difflib import fnmatch import inspect import io +import keyword import locale -import optparse import os import re import signal import sys import token import tokenize -import warnings from pylama.lint.pylama_pep8 import pep8 @@ -64,7 +65,7 @@ unicode = str -__version__ = '0.9.6' +__version__ = '1.0' CR = '\r' @@ -80,22 +81,25 @@ frozenset([',']), frozenset(['%']), frozenset([',', '(', '[', '{']), + frozenset(['%', 
'(', '[', '{']), frozenset([',', '(', '[', '{', '%', '+', '-', '*', '/', '//']), + frozenset(['%', '+', '-', '*', '/', '//']), ]) DEFAULT_IGNORE = 'E24' +DEFAULT_INDENT_SIZE = 4 # W602 is handled separately due to the need to avoid "with_traceback". CODE_TO_2TO3 = { + 'E721': ['idioms'], 'W601': ['has_key'], 'W603': ['ne'], 'W604': ['repr'], 'W690': ['apply', 'except', 'exitfunc', - 'idioms', 'import', 'numliterals', 'operator', @@ -134,10 +138,10 @@ def detect_encoding(filename): return 'latin-1' -def read_from_filename(filename, readlines=False): +def readlines_from_file(filename): """Return contents of file.""" with open_with_encoding(filename) as input_file: - return input_file.readlines() if readlines else input_file.read() + return input_file.readlines() def extended_blank_lines(logical_line, @@ -168,24 +172,27 @@ def extended_blank_lines(logical_line, def continued_indentation(logical_line, tokens, indent_level, noqa): - r"""Override pep8's function to provide indentation information.""" + """Override pep8's function to provide indentation information.""" first_row = tokens[0][2][0] nrows = 1 + tokens[-1][2][0] - first_row if noqa or nrows == 1: return - # indent_next tells us whether the next block is indented; assuming + # indent_next tells us whether the next block is indented. Assuming # that it is indented by 4 spaces, then we should not allow 4-space - # indents on the final continuation line; in turn, some other + # indents on the final continuation line. In turn, some other # indents are allowed to have an extra 4 spaces. indent_next = logical_line.endswith(':') row = depth = 0 - # remember how many brackets were opened on each line + + # Remember how many brackets were opened on each line. parens = [0] * nrows - # relative indents of physical lines + + # Relative indents of physical lines. rel_indent = [0] * nrows - # visual indents + + # Visual indents. 
indent_chances = {} last_indent = tokens[0][2] indent = [last_indent[1]] @@ -193,6 +200,7 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): last_token_multiline = None line = None last_line = '' + last_line_begins_with_multiline = False for token_type, text, start, end, line in tokens: newline = row < start[0] - first_row @@ -200,22 +208,23 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): row = start[0] - first_row newline = (not last_token_multiline and token_type not in (tokenize.NL, tokenize.NEWLINE)) + last_line_begins_with_multiline = last_token_multiline if newline: - # this is the beginning of a continuation line. + # This is the beginning of a continuation line. last_indent = start - # record the initial indent. + # Record the initial indent. rel_indent[row] = pep8.expand_indent(line) - indent_level if depth: - # a bracket expression in a continuation line. - # find the line that it was opened on + # A bracket expression in a continuation line. + # Find the line that it was opened on. for open_row in range(row - 1, -1, -1): if parens[open_row]: break else: - # an unbracketed continuation line (ie, backslash) + # An unbracketed continuation line (ie, backslash). open_row = 0 hang = rel_indent[row] - rel_indent[open_row] close_bracket = (token_type == tokenize.OP and text in ']})') @@ -223,63 +232,65 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): indent_chances.get(start[1])) if close_bracket and indent[depth]: - # closing bracket for visual indent + # Closing bracket for visual indent. if start[1] != indent[depth]: yield (start, 'E124 {0}'.format(indent[depth])) elif close_bracket and not hang: pass elif visual_indent is True: - # visual indent is verified + # Visual indent is verified. 
if not indent[depth]: indent[depth] = start[1] - elif ( - visual_indent in (text, unicode) and - not last_line.rstrip().endswith(',') - ): - # ignore token lined up with matching one from a previous line + elif visual_indent in (text, unicode): + # Ignore token lined up with matching one from a previous line. pass elif indent[depth] and start[1] < indent[depth]: - # visual indent is broken + # Visual indent is broken. yield (start, 'E128 {0}'.format(indent[depth])) - elif hang == 4 or (indent_next and rel_indent[row] == 8): - # hanging indent is verified + elif (hang == DEFAULT_INDENT_SIZE or + (indent_next and + rel_indent[row] == 2 * DEFAULT_INDENT_SIZE)): + # Hanging indent is verified. if close_bracket: yield (start, 'E123 {0}'.format(indent_level + rel_indent[open_row])) else: - one_indented = indent_level + rel_indent[open_row] + 4 - # indent is broken + one_indented = (indent_level + rel_indent[open_row] + + DEFAULT_INDENT_SIZE) + # Indent is broken. if hang <= 0: error = ('E122', one_indented) elif indent[depth]: error = ('E127', indent[depth]) - elif hang % 4: + elif hang % DEFAULT_INDENT_SIZE: error = ('E121', one_indented) else: error = ('E126', one_indented) + yield (start, '{0} {1}'.format(*error)) - # look for visual indenting + # Look for visual indenting. if (parens[row] and token_type not in (tokenize.NL, tokenize.COMMENT) and not indent[depth]): indent[depth] = start[1] indent_chances[start[1]] = True - # deal with implicit string concatenation + # Deal with implicit string concatenation. elif (token_type in (tokenize.STRING, tokenize.COMMENT) or text in ('u', 'ur', 'b', 'br')): indent_chances[start[1]] = unicode - # special case for the "if" statement because len("if (") == 4 + # Special case for the "if" statement because len("if (") is equal to + # 4. elif not indent_chances and not row and not depth and text == 'if': indent_chances[end[1] + 1] = True - # keep track of bracket depth + # Keep track of bracket depth. 
if token_type == tokenize.OP: if text in '([{': depth += 1 indent.append(0) parens[row] += 1 elif text in ')]}' and depth > 0: - # parent indents should not be more than this one + # Parent indents should not be more than this one. prev_indent = indent.pop() or last_indent[1] for d in range(depth): if indent[d] > prev_indent: @@ -296,15 +307,24 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): rel_indent[row] = rel_indent[idx] break assert len(indent) == depth + 1 - if start[1] not in indent_chances: - # allow to line up tokens + if ( + start[1] not in indent_chances and + # This is for purposes of speeding up E121 (GitHub #90). + not last_line.rstrip().endswith(',') + ): + # Allow to line up tokens. indent_chances[start[1]] = text last_token_multiline = (start[0] != end[0]) last_line = line - if indent_next and pep8.expand_indent(line) == indent_level + 4: - yield (last_indent, 'E125 {0}'.format(indent_level + 8)) + if ( + indent_next and + not last_line_begins_with_multiline and + pep8.expand_indent(line) == indent_level + DEFAULT_INDENT_SIZE + ): + yield (last_indent, 'E125 {0}'.format(indent_level + + 2 * DEFAULT_INDENT_SIZE)) del pep8._checks['logical_line'][pep8.continued_indentation] pep8.register_check(continued_indentation) @@ -328,7 +348,6 @@ class FixPEP8(object): with the line number reported in the pep8 error information. 
[fixed method list] - - e111 - e121,e122,e123,e124,e125,e126,e127,e128,e129 - e201,e202,e203 - e211 @@ -342,29 +361,32 @@ class FixPEP8(object): - e502 - e701,e702 - e711 - - w291,w293 - - w391 + - w291 """ - def __init__(self, filename, options, contents=None): + def __init__(self, filename, + options, + contents=None, + long_line_ignore_cache=None): self.filename = filename if contents is None: - self.source = read_from_filename(filename, readlines=True) + self.source = readlines_from_file(filename) else: sio = io.StringIO(contents) self.source = sio.readlines() - self.newline = find_newline(self.source) self.options = options self.indent_word = _get_indentword(''.join(self.source)) + self.long_line_ignore_cache = ( + set() if long_line_ignore_cache is None + else long_line_ignore_cache) + # method definition - self.fix_e111 = self.fix_e101 self.fix_e121 = self._fix_reindent self.fix_e122 = self._fix_reindent self.fix_e123 = self._fix_reindent self.fix_e124 = self._fix_reindent - self.fix_e125 = self._fix_reindent self.fix_e126 = self._fix_reindent self.fix_e127 = self._fix_reindent self.fix_e128 = self._fix_reindent @@ -385,12 +407,21 @@ def __init__(self, filename, options, contents=None): self.fix_e273 = self.fix_e271 self.fix_e274 = self.fix_e271 self.fix_e309 = self.fix_e301 + self.fix_e501 = ( + self.fix_long_line_logically if + options and (options.aggressive >= 2 or options.experimental) else + self.fix_long_line_physically) self.fix_e703 = self.fix_e702 - self.fix_w191 = self.fix_e101 self._ws_comma_done = False def _fix_source(self, results): + try: + (logical_start, logical_end) = _find_logical(self.source) + logical_support = True + except (SyntaxError, tokenize.TokenError): # pragma: no cover + logical_support = False + completed_lines = set() for result in sorted(results, key=_priority_key): if result['line'] in completed_lines: @@ -405,23 +436,28 @@ def _fix_source(self, results): is_logical_fix = len(inspect.getargspec(fix).args) > 2 if 
is_logical_fix: - # Do not run logical fix if any lines have been modified. - if completed_lines: - continue - - logical = self._get_logical(result) - if not logical: - continue + logical = None + if logical_support: + logical = _get_logical(self.source, + result, + logical_start, + logical_end) + if logical and set(range( + logical[0][0] + 1, + logical[1][0] + 1)).intersection( + completed_lines): + continue modified_lines = fix(result, logical) else: modified_lines = fix(result) - if ( - modified_lines is None and - self.source[line_index] == original_line - ): - modified_lines = [] + if modified_lines is None: + # Force logical fixes to report what they modified. + assert not is_logical_fix + + if self.source[line_index] == original_line: + modified_lines = [] if modified_lines: completed_lines.update(modified_lines) @@ -472,89 +508,39 @@ def fix(self): self._fix_source(filter_results(source=''.join(self.source), results=results, - aggressive=self.options.aggressive)) + aggressive=self.options.aggressive, + indent_size=self.options.indent_size)) return ''.join(self.source) - def fix_e101(self, _): - """Reindent all lines.""" - reindenter = Reindenter(self.source, self.newline) - modified_line_numbers = reindenter.run() - if modified_line_numbers: - self.source = reindenter.fixed_lines() - return modified_line_numbers - else: - return [] - - def _find_logical(self): - # make a variable which is the index of all the starts of lines - logical_start = [] - logical_end = [] - last_newline = True - sio = io.StringIO(''.join(self.source)) - parens = 0 - for t in tokenize.generate_tokens(sio.readline): - if t[0] in [tokenize.COMMENT, tokenize.DEDENT, - tokenize.INDENT, tokenize.NL, - tokenize.ENDMARKER]: - continue - if not parens and t[0] in [tokenize.NEWLINE, tokenize.SEMI]: - last_newline = True - logical_end.append((t[3][0] - 1, t[2][1])) - continue - if last_newline and not parens: - logical_start.append((t[2][0] - 1, t[2][1])) - last_newline = False - if t[0] == 
tokenize.OP: - if t[1] in '([{': - parens += 1 - elif t[1] in '}])': - parens -= 1 - return (logical_start, logical_end) - - def _get_logical(self, result): - """Return the logical line corresponding to the result. - - Assumes input is already E702-clean. - - """ - try: - (logical_start, logical_end) = self._find_logical() - except (SyntaxError, tokenize.TokenError): - return None - - row = result['line'] - 1 - col = result['column'] - 1 - ls = None - le = None - for i in range(0, len(logical_start), 1): - x = logical_end[i] - if x[0] > row or (x[0] == row and x[1] > col): - le = x - ls = logical_start[i] - break - if ls is None: - return None - original = self.source[ls[0]:le[0] + 1] - return ls, le, original - def _fix_reindent(self, result): """Fix a badly indented line. This is done by adding or removing from its initial indent only. """ - num_indent = int(result['info'].split()[1]) + num_indent_spaces = int(result['info'].split()[1]) line_index = result['line'] - 1 target = self.source[line_index] - # When multiline strings are involved, pep8 reports the error as - # being at the start of the multiline string, which doesn't work - # for us. - if ('"""' in target or "'''" in target): - return [] + self.source[line_index] = ' ' * num_indent_spaces + target.lstrip() + + def fix_e125(self, result): + """Fix indentation undistinguish from the next logical line.""" + num_indent_spaces = int(result['info'].split()[1]) + line_index = result['line'] - 1 + target = self.source[line_index] + + spaces_to_add = num_indent_spaces - len(_get_indentation(target)) + indent = len(_get_indentation(target)) + modified_lines = [] - self.source[line_index] = ( - ' ' * num_indent + target.lstrip()) + while len(_get_indentation(self.source[line_index])) >= indent: + self.source[line_index] = (' ' * spaces_to_add + + self.source[line_index]) + modified_lines.append(1 + line_index) # Line indexed at 1. 
+ line_index -= 1 + + return modified_lines def fix_e201(self, result): """Remove extraneous whitespace.""" @@ -562,14 +548,7 @@ def fix_e201(self, result): target = self.source[line_index] offset = result['column'] - 1 - # When multiline strings are involved, pep8 reports the error as - # being at the start of the multiline string, which doesn't work - # for us. - if ( - '"""' in target or - "'''" in target or - target.rstrip().endswith('\\') - ): + if is_probably_part_of_multiline(target): return [] fixed = fix_whitespace(target, @@ -655,8 +634,7 @@ def fix_e262(self, result): code = target[:offset].rstrip(' \t#') comment = target[offset:].lstrip(' \t#') - fixed = code + (' # ' + comment if comment.strip() - else self.newline) + fixed = code + (' # ' + comment if comment.strip() else '\n') self.source[result['line'] - 1] = fixed @@ -666,12 +644,7 @@ def fix_e271(self, result): target = self.source[line_index] offset = result['column'] - 1 - # When multiline strings are involved, pep8 reports the error as - # being at the start of the multiline string, which doesn't work - # for us. 
- if ('"""' in target or - "'''" in target or - target.rstrip().endswith('\\')): + if is_probably_part_of_multiline(target): return [] fixed = fix_whitespace(target, @@ -685,13 +658,13 @@ def fix_e271(self, result): def fix_e301(self, result): """Add missing blank line.""" - cr = self.newline + cr = '\n' self.source[result['line'] - 1] = cr + self.source[result['line'] - 1] def fix_e302(self, result): """Add missing 2 blank lines.""" add_linenum = 2 - int(result['info'].split()[-1]) - cr = self.newline * add_linenum + cr = '\n' * add_linenum self.source[result['line'] - 1] = cr + self.source[result['line'] - 1] def fix_e303(self, result): @@ -728,101 +701,103 @@ def fix_e401(self, result): if not target.lstrip().startswith('import'): return [] - # pep8 (1.3.1) reports false positive if there is an import statement - # followed by a semicolon and some unrelated statement with commas in - # it. - if ';' in target: - return [] - indentation = re.split(pattern=r'\bimport\b', string=target, maxsplit=1)[0] - fixed = (target[:offset].rstrip('\t ,') + self.newline + + fixed = (target[:offset].rstrip('\t ,') + '\n' + indentation + 'import ' + target[offset:].lstrip('\t ,')) self.source[line_index] = fixed - def fix_e501(self, result): + def fix_long_line_logically(self, result, logical): """Try to make lines fit within --max-line-length characters.""" - line_index = result['line'] - 1 - target = self.source[line_index] + if ( + not logical or + len(logical[2]) == 1 or + self.source[result['line'] - 1].lstrip().startswith('#') + ): + return self.fix_long_line_physically(result) - if target.lstrip().startswith('#'): - last_comment = True - try: - if self.source[line_index + 1].lstrip().startswith('#'): - last_comment = False - except IndexError: - pass + start_line_index = logical[0][0] + end_line_index = logical[1][0] + logical_lines = logical[2] - # Wrap commented lines. 
- fixed = shorten_comment( - line=target, - newline=self.newline, - max_line_length=self.options.max_line_length, - last_comment=last_comment) - self.source[line_index] = fixed - return + previous_line = get_item(self.source, start_line_index - 1, default='') + next_line = get_item(self.source, end_line_index + 1, default='') - indent = _get_indentation(target) - source = target[len(indent):] - assert source.lstrip() == source - sio = io.StringIO(source) + single_line = join_logical_line(''.join(logical_lines)) - # Check for multiline string. try: - tokens = list(tokenize.generate_tokens(sio.readline)) + fixed = self.fix_long_line( + target=single_line, + previous_line=previous_line, + next_line=next_line, + original=''.join(logical_lines)) except (SyntaxError, tokenize.TokenError): - multiline_candidate = break_multiline( - target, newline=self.newline, - indent_word=self.indent_word) + return self.fix_long_line_physically(result) - if multiline_candidate: - self.source[line_index] = multiline_candidate - return - else: - return [] + if fixed: + for line_index in range(start_line_index, end_line_index + 1): + self.source[line_index] = '' + self.source[start_line_index] = fixed + return range(start_line_index + 1, end_line_index + 1) + else: + return [] - # Handle statements by putting the right hand side on a line by itself. - # This should let the next pass shorten it. 
- if self.options.aggressive and source.startswith('return '): - self.source[line_index] = ( - indent + - 'return (' + - self.newline + - indent + self.indent_word + re.sub('^return ', '', source) + - indent + ')' + self.newline - ) - return + def fix_long_line_physically(self, result): + """Try to make lines fit within --max-line-length characters.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + previous_line = get_item(self.source, line_index - 1, default='') + next_line = get_item(self.source, line_index + 1, default='') + + try: + fixed = self.fix_long_line( + target=target, + previous_line=previous_line, + next_line=next_line, + original=target) + except (SyntaxError, tokenize.TokenError): + return [] + + if fixed: + self.source[line_index] = fixed + return [line_index + 1] + else: + return [] - candidates = shorten_line( - tokens, source, indent, - self.indent_word, newline=self.newline, + def fix_long_line(self, target, previous_line, + next_line, original): + cache_entry = (target, previous_line, next_line) + if cache_entry in self.long_line_ignore_cache: + return [] + + if target.lstrip().startswith('#'): + # Wrap commented lines. + return shorten_comment( + line=target, + max_line_length=self.options.max_line_length, + last_comment=not next_line.lstrip().startswith('#')) + + fixed = get_fixed_long_line( + target=target, + previous_line=previous_line, + original=original, + indent_word=self.indent_word, + max_line_length=self.options.max_line_length, aggressive=self.options.aggressive, - previous_line=( - self.source[line_index - 1] if line_index >= 1 else '')) - - # Also sort alphabetically as a tie breaker (for determinism). 
- candidates = list(sorted( - sorted(set(candidates).union([target])), - key=lambda x: line_shortening_rank(x, - self.newline, - self.indent_word, - self.options.max_line_length))) - - if self.options.verbose >= 4: - print(('-' * 79 + '\n').join([''] + candidates + ['']), - file=codecs.getwriter('utf-8')(sys.stderr.buffer - if hasattr(sys.stderr, - 'buffer') - else sys.stderr)) - - if candidates: - self.source[line_index] = candidates[0] + experimental=self.options.experimental, + verbose=self.options.verbose) + if fixed and not code_almost_equal(original, fixed): + return fixed + else: + self.long_line_ignore_cache.add(cache_entry) + return None def fix_e502(self, result): """Remove extraneous escape of newline.""" line_index = result['line'] - 1 target = self.source[line_index] - self.source[line_index] = target.rstrip('\n\r \t\\') + self.newline + self.source[line_index] = target.rstrip('\n\r \t\\') + '\n' def fix_e701(self, result): """Put colon-separated compound statement on separate lines.""" @@ -830,7 +805,7 @@ def fix_e701(self, result): target = self.source[line_index] c = result['column'] - fixed_source = (target[:c] + self.newline + + fixed_source = (target[:c] + '\n' + _get_indentation(target) + self.indent_word + target[c:].lstrip('\n\r \t\\')) self.source[result['line'] - 1] = fixed_source @@ -838,6 +813,8 @@ def fix_e701(self, result): def fix_e702(self, result, logical): """Put semicolon-separated compound statement on separate lines.""" + if not logical: + return [] # pragma: no cover logical_lines = logical[2] line_index = result['line'] - 1 @@ -850,15 +827,16 @@ def fix_e702(self, result, logical): return [line_index + 1, line_index + 2] if target.rstrip().endswith(';'): - self.source[line_index] = target.rstrip('\n \r\t;') + self.newline - return + self.source[line_index] = target.rstrip('\n \r\t;') + '\n' + return [line_index + 1] offset = result['column'] - 1 first = target[:offset].rstrip(';').rstrip() second = 
(_get_indentation(logical_lines[0]) + target[offset:].lstrip(';').lstrip()) - self.source[line_index] = first + self.newline + second + self.source[line_index] = first + '\n' + second + return [line_index + 1] def fix_e711(self, result): """Fix comparison with None.""" @@ -928,29 +906,172 @@ def fix_e712(self, result): def fix_w291(self, result): """Remove trailing whitespace.""" fixed_line = self.source[result['line'] - 1].rstrip() - self.source[result['line'] - 1] = fixed_line + self.newline - - def fix_w293(self, result): - """Remove trailing whitespace on blank line.""" - assert not self.source[result['line'] - 1].strip() - self.source[result['line'] - 1] = self.newline - - def fix_w391(self, _): - """Remove trailing blank lines.""" - blank_count = 0 - for line in reversed(self.source): - line = line.rstrip() - if line: - break - else: - blank_count += 1 + self.source[result['line'] - 1] = fixed_line + '\n' + + +def get_fixed_long_line(target, previous_line, original, + indent_word=' ', max_line_length=79, + aggressive=False, experimental=False, verbose=False): + indent = _get_indentation(target) + source = target[len(indent):] + assert source.lstrip() == source + + # Check for partial multiline. + tokens = list(generate_tokens(source)) + + candidates = shorten_line( + tokens, source, indent, + indent_word, + max_line_length, + aggressive=aggressive, + experimental=experimental, + previous_line=previous_line) + + # Also sort alphabetically as a tie breaker (for determinism). 
+ candidates = sorted( + sorted(set(candidates).union([target, original])), + key=lambda x: line_shortening_rank(x, + indent_word, + max_line_length)) + + if verbose >= 4: + print(('-' * 79 + '\n').join([''] + candidates + ['']), + file=codecs.getwriter('utf-8')(sys.stderr.buffer + if hasattr(sys.stderr, + 'buffer') + else sys.stderr)) + + if candidates: + return candidates[0] + + +def join_logical_line(logical_line): + """Return single line based on logical line input.""" + indentation = _get_indentation(logical_line) + + return indentation + untokenize_without_newlines( + generate_tokens(logical_line.lstrip())) + '\n' + + +def untokenize_without_newlines(tokens): + """Return source code based on tokens.""" + text = '' + last_row = 0 + last_column = -1 + + for t in tokens: + token_string = t[1] + (start_row, start_column) = t[2] + (end_row, end_column) = t[3] + + if start_row > last_row: + last_column = 0 + if ( + (start_column > last_column or token_string == '\n') and + not text.endswith(' ') + ): + text += ' ' + + if token_string != '\n': + text += token_string + + last_row = end_row + last_column = end_column + + return text + + +def _find_logical(source_lines): + # make a variable which is the index of all the starts of lines + logical_start = [] + logical_end = [] + last_newline = True + parens = 0 + for t in generate_tokens(''.join(source_lines)): + if t[0] in [tokenize.COMMENT, tokenize.DEDENT, + tokenize.INDENT, tokenize.NL, + tokenize.ENDMARKER]: + continue + if not parens and t[0] in [tokenize.NEWLINE, tokenize.SEMI]: + last_newline = True + logical_end.append((t[3][0] - 1, t[2][1])) + continue + if last_newline and not parens: + logical_start.append((t[2][0] - 1, t[2][1])) + last_newline = False + if t[0] == tokenize.OP: + if t[1] in '([{': + parens += 1 + elif t[1] in '}])': + parens -= 1 + return (logical_start, logical_end) + + +def _get_logical(source_lines, result, logical_start, logical_end): + """Return the logical line corresponding to the 
result. + + Assumes input is already E702-clean. + + """ + row = result['line'] - 1 + col = result['column'] - 1 + ls = None + le = None + for i in range(0, len(logical_start), 1): + assert logical_end + x = logical_end[i] + if x[0] > row or (x[0] == row and x[1] > col): + le = x + ls = logical_start[i] + break + if ls is None: + return None + original = source_lines[ls[0]:le[0] + 1] + return ls, le, original + + +def get_item(items, index, default=None): + if 0 <= index < len(items): + return items[index] + else: + return default + + +def reindent(source, indent_size): + """Reindent all lines.""" + reindenter = Reindenter(source) + return reindenter.run(indent_size) + + +def code_almost_equal(a, b): + """Return True if code is similar. + + Ignore whitespace when comparing specific line. + + """ + split_a = split_and_strip_non_empty_lines(a) + split_b = split_and_strip_non_empty_lines(b) + + if len(split_a) != len(split_b): + return False + + for index in range(len(split_a)): + if ''.join(split_a[index].split()) != ''.join(split_b[index].split()): + return False + + return True + + +def split_and_strip_non_empty_lines(text): + """Return lines split by newline. - original_length = len(self.source) - self.source = self.source[:original_length - blank_count] - return range(1, 1 + original_length) + Ignore empty lines. + + """ + return [line.strip() for line in text.splitlines() if line.strip()] -def fix_e26(source, aggressive=False, select='', ignore=''): +def fix_e269(source, aggressive=False): """Format block comments.""" if '#' not in source: # Optimization. @@ -972,8 +1093,12 @@ def fix_e26(source, aggressive=False, select='', ignore=''): # Normalize beginning if not a shebang. if len(line) > 1: - # Leave multiple spaces like '# ' alone. - if line.count('#') > 1 or line[1].isalnum(): + if ( + # Leave multiple spaces like '# ' alone. + (line.count('#') > 1 or line[1].isalnum()) + # Leave stylistic outlined blocks alone. 
+ and not line.rstrip().endswith('#') + ): line = '# ' + line.lstrip('# \t') fixed_lines.append(indentation + line) @@ -1014,17 +1139,20 @@ def code_to_2to3(select, ignore): return fixes -def fix_w6(source, aggressive=True, select='', ignore=''): +def fix_2to3(source, aggressive=True, select=None, ignore=None): """Fix various deprecated code (via lib2to3).""" if not aggressive: return source + select = select or [] + ignore = ignore or [] + return refactor(source, code_to_2to3(select=select, ignore=ignore)) -def fix_w602(source, aggressive=True, select='', ignore=''): +def fix_w602(source, aggressive=True): """Fix deprecated form of raising exception.""" if not aggressive: return source @@ -1034,30 +1162,30 @@ def fix_w602(source, aggressive=True, select='', ignore=''): def find_newline(source): - """Return type of newline used in source.""" - cr, lf, crlf = 0, 0, 0 - for s in source: - if s.endswith(CRLF): - crlf += 1 - elif s.endswith(CR): - cr += 1 - elif s.endswith(LF): - lf += 1 - _max = max(lf, cr, crlf) - if _max == lf: - return LF - elif _max == crlf: - return CRLF - else: - return CR + """Return type of newline used in source. + + Input is a list of lines. + + """ + assert not isinstance(source, unicode) + + counter = collections.defaultdict(int) + for line in source: + if line.endswith(CRLF): + counter[CRLF] += 1 + elif line.endswith(CR): + counter[CR] += 1 + elif line.endswith(LF): + counter[LF] += 1 + + return (sorted(counter, key=counter.get, reverse=True) or [LF])[0] def _get_indentword(source): """Return indentation type.""" - sio = io.StringIO(source) indent_word = ' ' # Default in case source has no indentation try: - for t in tokenize.generate_tokens(sio.readline): + for t in generate_tokens(source): if t[0] == token.INDENT: indent_word = t[1] break @@ -1103,8 +1231,6 @@ def _priority_key(pep8_result): """ priority = [ - # Global fixes. - 'e101', 'e111', 'w191', # Fix multiline colon-based before semicolon based. 
'e701', # Break multiline statements early. @@ -1116,16 +1242,24 @@ def _priority_key(pep8_result): # Shorten whitespace in comment before resorting to wrapping. 'e262' ] + middle_index = 10000 + lowest_priority = [ + # We need to shorten lines last since the logical fixer can get in a + # loop, which causes us to exit early. + 'e501' + ] key = pep8_result['id'].lower() - if key in priority: + try: return priority.index(key) - else: - # Lowest priority - return len(priority) + except ValueError: + try: + return middle_index + lowest_priority.index(key) + 1 + except ValueError: + return middle_index -def shorten_line(tokens, source, indentation, indent_word, newline, - aggressive=False, previous_line=''): +def shorten_line(tokens, source, indentation, indent_word, max_line_length, + aggressive=False, experimental=False, previous_line=''): """Separate line at OPERATOR. Multiple candidates will be yielded. @@ -1135,7 +1269,6 @@ def shorten_line(tokens, source, indentation, indent_word, newline, source=source, indentation=indentation, indent_word=indent_word, - newline=newline, aggressive=aggressive, previous_line=previous_line): yield candidate @@ -1147,44 +1280,56 @@ def shorten_line(tokens, source, indentation, indent_word, newline, source=source, indentation=indentation, indent_word=indent_word, - newline=newline, key_token_strings=key_token_strings, aggressive=aggressive) if shortened is not None and shortened != source: yield shortened + if experimental: + for shortened in _shorten_line_at_tokens_new( + tokens=tokens, + source=source, + indentation=indentation, + indent_word=indent_word, + max_line_length=max_line_length): + + yield shortened -def _shorten_line(tokens, source, indentation, indent_word, newline, + +def _shorten_line(tokens, source, indentation, indent_word, aggressive=False, previous_line=''): """Separate line at OPERATOR. + The input is expected to be free of newlines except for inside multiline + strings and at the end. 
+ Multiple candidates will be yielded. """ - for tkn in tokens: - token_type = tkn[0] - token_string = tkn[1] + for (token_type, + token_string, + start_offset, + end_offset) in token_offsets(tokens): if ( token_type == tokenize.COMMENT and - not previous_line.rstrip().endswith('\\') and - not source[tkn[2][1] + 1:].strip().lower().startswith( + not is_probably_part_of_multiline(previous_line) and + not is_probably_part_of_multiline(source) and + not source[start_offset + 1:].strip().lower().startswith( ('noqa', 'pragma:', 'pylint:')) ): # Move inline comments to previous line. - offset = tkn[2][1] - first = source[:offset] - second = source[offset:] - yield (indentation + second.strip() + newline + - indentation + first.strip() + newline) + first = source[:start_offset] + second = source[start_offset:] + yield (indentation + second.strip() + '\n' + + indentation + first.strip() + '\n') elif token_type == token.OP and token_string != '=': # Don't break on '=' after keyword as this violates PEP 8. assert token_type != token.INDENT - offset = tkn[2][1] + 1 - first = source[:offset] + first = source[:end_offset] second_indent = indentation if first.rstrip().endswith('('): @@ -1194,7 +1339,7 @@ def _shorten_line(tokens, source, indentation, indent_word, newline, else: second_indent += indent_word - second = (second_indent + source[offset:].lstrip()) + second = (second_indent + source[end_offset:].lstrip()) if ( not second.strip() or second.lstrip().startswith('#') @@ -1208,46 +1353,772 @@ def _shorten_line(tokens, source, indentation, indent_word, newline, if first.rstrip().endswith('.'): continue if token_string in '+-*/': - fixed = first + ' \\' + newline + second + fixed = first + ' \\' + '\n' + second else: - fixed = first + newline + second + fixed = first + '\n' + second # Only fix if syntax is okay. 
- if check_syntax(normalize_multiline(fixed, newline=newline) + if check_syntax(normalize_multiline(fixed) if aggressive else fixed): yield indentation + fixed -def _shorten_line_at_tokens(tokens, source, indentation, indent_word, newline, +# A convenient way to handle tokens. +Token = collections.namedtuple('Token', ['token_type', 'token_string', + 'spos', 'epos', 'line']) + + +class ReformattedLines(object): + + """The reflowed lines of atoms. + + Each part of the line is represented as an "atom." They can be moved + around when need be to get the optimal formatting. + + """ + + ########################################################################### + # Private Classes + + class _Indent(object): + + """Represent an indentation in the atom stream.""" + + def __init__(self, indent_amt): + self._indent_amt = indent_amt + + def emit(self): + return ' ' * self._indent_amt + + @property + def size(self): + return self._indent_amt + + class _Space(object): + + """Represent a space in the atom stream.""" + + def emit(self): + return ' ' + + @property + def size(self): + return 1 + + class _LineBreak(object): + + """Represent a line break in the atom stream.""" + + def emit(self): + return '\n' + + @property + def size(self): + return 0 + + def __init__(self, max_line_length): + self._max_line_length = max_line_length + self._lines = [] + self._bracket_depth = 0 + self._prev_item = None + self._prev_prev_item = None + + def __repr__(self): + return self.emit() + + ########################################################################### + # Public Methods + + def add(self, obj, indent_amt): + if isinstance(obj, Atom): + self._add_item(obj, indent_amt) + return + + self._add_container(obj, indent_amt) + + def add_comment(self, item): + self._lines.append(self._Space()) + self._lines.append(self._Space()) + self._lines.append(item) + + def add_indent(self, indent_amt): + self._lines.append(self._Indent(indent_amt)) + + def add_line_break(self, indent): + 
self._lines.append(self._LineBreak()) + self.add_indent(len(indent)) + + def add_line_break_at(self, index, indent_amt): + self._lines.insert(index, self._LineBreak()) + self._lines.insert(index + 1, self._Indent(indent_amt)) + + def add_space_if_needed(self, curr_text, equal=False): + if ( + not self._lines or isinstance( + self._lines[-1], (self._LineBreak, self._Indent, self._Space)) + ): + return + + prev_text = unicode(self._prev_item) + prev_prev_text = \ + unicode(self._prev_prev_item) if self._prev_prev_item else '' + + if ( + # The previous item was a keyword or identifier and the current + # item isn't an operator that doesn't require a space. + ((self._prev_item.is_keyword or self._prev_item.is_string or + self._prev_item.is_name or self._prev_item.is_number) and + (curr_text[0] not in '([{.,:}])' or + (curr_text[0] == '=' and equal))) or + + # Don't place spaces around a '.', unless it's in an 'import' + # statement. + ((prev_prev_text != 'from' and prev_text[-1] != '.' and + curr_text != 'import') and + + # Don't place a space before a colon. + curr_text[0] != ':' and + + # Don't split up ending brackets by spaces. + ((prev_text[-1] in '}])' and curr_text[0] not in '.,}])') or + + # Put a space after a colon or comma. + prev_text[-1] in ':,' or + + # Put space around '=' if asked to. + (equal and prev_text == '=') or + + # Put spaces around non-unary arithmetic operators. 
+ ((self._prev_prev_item and + (prev_text not in '+-' and + (self._prev_prev_item.is_name or + self._prev_prev_item.is_number or + self._prev_prev_item.is_string)) and + prev_text in ('+', '-', '%', '*', '/', '//', '**'))))) + ): + self._lines.append(self._Space()) + + def fits_on_current_line(self, item_extent): + return self.current_size() + item_extent <= self._max_line_length + + def current_size(self): + """The size of the current line minus the indentation.""" + size = 0 + for item in reversed(self._lines): + size += item.size + if isinstance(item, self._LineBreak): + break + + return size + + def line_empty(self): + return (self._lines and + isinstance(self._lines[-1], + (self._LineBreak, self._Indent))) + + def emit(self): + string = '' + for item in self._lines: + if isinstance(item, self._LineBreak): + string = string.rstrip() + string += item.emit() + + return string.rstrip() + '\n' + + ########################################################################### + # Private Methods + + def _add_item(self, item, indent_amt): + """Add an item to the line. + + Reflow the line to get the best formatting after the item is + inserted. The bracket depth indicates if the item is being + inserted inside of a container or not. + + """ + if self._prev_item and self._prev_item.is_string and item.is_string: + # Place consecutive string literals on separate lines. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + + item_text = unicode(item) + if self._lines and self._bracket_depth: + # Adding the item into a container. + self._prevent_default_initializer_splitting(item, indent_amt) + + if item_text in '.,)]}': + self._split_after_delimiter(item, indent_amt) + + elif self._lines and not self.line_empty(): + # Adding the item outside of a container. + if self.fits_on_current_line(len(item_text)): + self._enforce_space(item) + + else: + # Line break for the new item. 
+ self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + + self._lines.append(item) + self._prev_item, self._prev_prev_item = item, self._prev_item + + if item_text in '([{': + self._bracket_depth += 1 + + elif item_text in '}])': + self._bracket_depth -= 1 + assert self._bracket_depth >= 0 + + def _add_container(self, container, indent_amt): + if ( + unicode(self._prev_item) != '=' and + not self.line_empty() and + not self.fits_on_current_line( + container.size + self._bracket_depth + 2) and + + # Don't split before the opening bracket of a call. + (unicode(container)[0] != '(' or not self._prev_item.is_name) + ): + # If the container doesn't fit on the current line and the current + # line isn't empty, place the container on the next line. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + + # Increase the continued indentation only if recursing on a + # container. + container.reflow(self, ' ' * (indent_amt + 1)) + + def _prevent_default_initializer_splitting(self, item, indent_amt): + """Prevent splitting between a default initializer. + + When there is a default initializer, it's best to keep it all on + the same line. It's nicer and more readable, even if it goes + over the maximum allowable line length. This goes back along the + current line to determine if we have a default initializer, and, + if so, to remove extraneous whitespaces and add a line + break/indent before it if needed. + + """ + if unicode(item) == '=': + # This is the assignment in the initializer. Just remove spaces for + # now. 
+ self._delete_whitespace() + return + + if (not self._prev_item or not self._prev_prev_item or + unicode(self._prev_item) != '='): + return + + self._delete_whitespace() + prev_prev_index = self._lines.index(self._prev_prev_item) + + if ( + isinstance(self._lines[prev_prev_index - 1], self._Indent) or + self.fits_on_current_line(item.size + 1) + ): + # The default initializer is already the only item on this line. + # Don't insert a newline here. + return + + # Replace the space with a newline/indent combo. + if isinstance(self._lines[prev_prev_index - 1], self._Space): + del self._lines[prev_prev_index - 1] + + self.add_line_break_at(self._lines.index(self._prev_prev_item), + indent_amt) + + def _split_after_delimiter(self, item, indent_amt): + """Split the line only after a delimiter.""" + self._delete_whitespace() + + if self.fits_on_current_line(item.size): + return + + last_space = None + for item in reversed(self._lines): + if isinstance(item, self._Space): + last_space = item + break + if isinstance(item, (self._LineBreak, self._Indent)): + return + + if not last_space: + return + + self.add_line_break_at(self._lines.index(last_space), indent_amt) + + def _enforce_space(self, item): + """Enforce a space in certain situations. + + There are cases where we will want a space where normally we + wouldn't put one. This just enforces the addition of a space. + + """ + if isinstance(self._lines[-1], + (self._Space, self._LineBreak, self._Indent)): + return + + if not self._prev_item: + return + + item_text = unicode(item) + prev_text = unicode(self._prev_item) + + # Prefer a space around a '.' in an import statement, and between the + # 'import' and '('. + if ( + (item_text == '.' 
and prev_text == 'from') or + (item_text == 'import' and prev_text == '.') or + (item_text == '(' and prev_text == 'import') + ): + self._lines.append(self._Space()) + + def _delete_whitespace(self): + """Delete all whitespace from the end of the line.""" + while isinstance(self._lines[-1], (self._Space, self._LineBreak, + self._Indent)): + del self._lines[-1] + + +class Atom(object): + + """The smallest unbreakable unit that can be reflowed.""" + + def __init__(self, atom): + self._atom = atom + + def __repr__(self): + return self._atom.token_string + + def __len__(self): + return self.size + + def reflow(self, reflowed_lines, continued_indent, extent, + break_after_open_bracket=False): + if self._atom.token_type == tokenize.COMMENT: + reflowed_lines.add_comment(self) + return + + total_size = extent if extent else self.size + + if self._atom.token_string not in ',:([{}])': + # Some atoms will need an extra 1-sized space token after them. + total_size += 1 + + if ( + not reflowed_lines.fits_on_current_line(total_size) and + not reflowed_lines.line_empty() + ): + # Start a new line if there is already something on the line and + # adding this atom would make it go over the max line length. 
+ reflowed_lines.add_line_break(continued_indent) + else: + reflowed_lines.add_space_if_needed(unicode(self)) + + reflowed_lines.add(self, len(continued_indent)) + + def emit(self): + return self.__repr__() + + @property + def is_keyword(self): + return keyword.iskeyword(self._atom.token_string) + + @property + def is_string(self): + return self._atom.token_type == tokenize.STRING + + @property + def is_name(self): + return self._atom.token_type == tokenize.NAME + + @property + def is_number(self): + return self._atom.token_type == tokenize.NUMBER + + @property + def is_comma(self): + return self._atom.token_string == ',' + + @property + def is_colon(self): + return self._atom.token_string == ':' + + @property + def size(self): + return len(self._atom.token_string) + + +class Container(object): + + """Base class for all container types.""" + + def __init__(self, items): + self._items = items + + def __repr__(self): + string = '' + last_was_keyword = False + + for item in self._items: + if item.is_comma: + string += ', ' + elif item.is_colon: + string += ': ' + else: + item_string = unicode(item) + if ( + string and + (last_was_keyword or + (not string.endswith(tuple('([{,.:}]) ')) and + not item_string.startswith(tuple('([{,.:}])')))) + ): + string += ' ' + string += item_string + + last_was_keyword = item.is_keyword + return string + + def __iter__(self): + for element in self._items: + yield element + + def __getitem__(self, idx): + return self._items[idx] + + def reflow(self, reflowed_lines, continued_indent, + break_after_open_bracket=False): + for (index, item) in enumerate(self._items): + if isinstance(item, Atom): + item.reflow(reflowed_lines, continued_indent, + self._get_extent(index)) + else: # isinstance(item, Container) + reflowed_lines.add(item, len(continued_indent)) + + next_item = get_item(self._items, index + 1) + if ( + break_after_open_bracket and index == 0 and + # Prefer to keep empty containers together instead of + # separating them. 
+ unicode(item) == self.open_bracket and + (not next_item or unicode(next_item) != self.close_bracket) and + (len(self._items) != 3 or not isinstance(next_item, Atom)) + ): + reflowed_lines.add_line_break(continued_indent) + break_after_open_bracket = False + else: + next_next_item = get_item(self._items, index + 2) + if ( + unicode(item) not in '.%' and next_item and + next_next_item and unicode(next_item) != ':' and + not isinstance(next_next_item, Atom) and + not reflowed_lines.line_empty() and + not reflowed_lines.fits_on_current_line( + next_item.size + next_next_item.size + 2) + ): + reflowed_lines.add_line_break(continued_indent) + + def _get_extent(self, index): + """The extent of the full element. + + E.g., the length of a function call or keyword. + """ + extent = 0 + while index < len(self._items): + item = get_item(self._items, index) + if unicode(item) not in '.=' and not item.is_name: + break + extent += len(item) + index += 1 + return extent + + @property + def is_string(self): + return False + + @property + def size(self): + return len(self.__repr__()) + + @property + def is_keyword(self): + return False + + @property + def is_name(self): + return False + + @property + def is_comma(self): + return False + + @property + def is_colon(self): + return False + + @property + def open_bracket(self): + return None + + @property + def close_bracket(self): + return None + + +class Tuple(Container): + + """A high-level representation of a tuple.""" + + @property + def open_bracket(self): + return '(' + + @property + def close_bracket(self): + return ')' + + +class List(Container): + + """A high-level representation of a list.""" + + @property + def open_bracket(self): + return '[' + + @property + def close_bracket(self): + return ']' + + +class DictOrSet(Container): + + """A high-level representation of a dictionary or set.""" + + @property + def open_bracket(self): + return '{' + + @property + def close_bracket(self): + return '}' + + +class 
ListComprehension(Container): + + """A high-level representation of a list comprehension.""" + + +class IfExpression(Container): + + """A high-level representation of an if-expression.""" + + +def _parse_container(tokens, index, for_or_if=None): + """Parse a high-level container, such as a list, tuple, etc.""" + + # Store the opening bracket. + items = [Atom(Token(*tokens[index]))] + index += 1 + + num_tokens = len(tokens) + while index < num_tokens: + tok = Token(*tokens[index]) + + if tok.token_string in ',)]}': + # First check if we're at the end of a list comprehension or + # if-expression. Don't add the ending token as part of the list + # comprehension or if-expression, because they aren't part of those + # constructs. + if for_or_if == 'for': + return (ListComprehension(items), index - 1) + + elif for_or_if == 'if': + return (IfExpression(items), index - 1) + + # We've reached the end of a container. + items.append(Atom(tok)) + + # If not, then we are at the end of a container. + if tok.token_string == ')': + # The end of a tuple. + return (Tuple(items), index) + + elif tok.token_string == ']': + # The end of a list. + return (List(items), index) + + elif tok.token_string == '}': + # The end of a dictionary or set. + return (DictOrSet(items), index) + + elif tok.token_string in '([{': + # A sub-container is being defined. + (container, index) = _parse_container(tokens, index) + items.append(container) + + elif tok.token_string == 'for': + (container, index) = _parse_container(tokens, index, 'for') + items.append(container) + + elif tok.token_string == 'if': + (container, index) = _parse_container(tokens, index, 'if') + items.append(container) + + else: + items.append(Atom(tok)) + + index += 1 + + +def _parse_tokens(tokens): + """Parse the tokens. + + This converts the tokens into a form where we can manipulate them + more easily. 
+ + """ + + index = 0 + parsed_tokens = [] + + num_tokens = len(tokens) + while index < num_tokens: + tok = Token(*tokens[index]) + + assert tok.token_type != token.INDENT + if tok.token_type == tokenize.NEWLINE: + # There's only one newline and it's at the end. + break + + if tok.token_string in '([{': + (container, index) = _parse_container(tokens, index) + parsed_tokens.append(container) + else: + parsed_tokens.append(Atom(tok)) + + index += 1 + + return parsed_tokens + + +def _reflow_lines(parsed_tokens, indentation, indent_word, max_line_length, + start_on_prefix_line): + """Reflow the lines so that it looks nice.""" + + if unicode(parsed_tokens[0]) == 'def': + # A function definition gets indented a bit more. + continued_indent = indentation + ' ' * 2 * DEFAULT_INDENT_SIZE + else: + continued_indent = indentation + ' ' * DEFAULT_INDENT_SIZE + + break_after_open_bracket = not start_on_prefix_line + + lines = ReformattedLines(max_line_length) + lines.add_indent(len(indentation)) + + for item in parsed_tokens: + lines.add_space_if_needed(unicode(item), equal=True) + + save_continued_indent = continued_indent + if start_on_prefix_line and isinstance(item, Container): + start_on_prefix_line = False + continued_indent = ' ' * (lines.current_size() + 1) + + item.reflow(lines, continued_indent, break_after_open_bracket) + continued_indent = save_continued_indent + + return lines.emit() + + +def _shorten_line_at_tokens_new(tokens, source, indentation, indent_word, + max_line_length): + """Shorten the line taking its length into account. + + The input is expected to be free of newlines except for inside + multiline strings and at the end. + + """ + # Yield the original source so to see if it's a better choice than the + # shortened candidate lines we generate here. + yield indentation + source + + parsed_tokens = _parse_tokens(tokens) + + if parsed_tokens: + # Perform two reflows. The first one starts on the same line as the + # prefix. 
The second starts on the line after the prefix. + fixed = _reflow_lines(parsed_tokens, indentation, indent_word, + max_line_length, start_on_prefix_line=True) + if check_syntax(normalize_multiline(fixed.lstrip())): + yield fixed + + fixed = _reflow_lines(parsed_tokens, indentation, indent_word, + max_line_length, start_on_prefix_line=False) + if check_syntax(normalize_multiline(fixed.lstrip())): + yield fixed + + +def _shorten_line_at_tokens(tokens, source, indentation, indent_word, key_token_strings, aggressive): """Separate line by breaking at tokens in key_token_strings. - This will always break the line at the first parenthesis. + The input is expected to be free of newlines except for inside + multiline strings and at the end. """ offsets = [] - first_paren = True - for tkn in tokens: - token_type = tkn[0] - token_string = tkn[1] - next_offset = tkn[2][1] + 1 + for (index, _t) in enumerate(token_offsets(tokens)): + (token_type, + token_string, + start_offset, + end_offset) = _t assert token_type != token.INDENT - if token_string in key_token_strings or (first_paren and - token_string == '('): - # Don't split right before newline. - if next_offset < len(source) - 1: - offsets.append(next_offset) + if token_string in key_token_strings: + # Do not break in containers with zero or one items. + unwanted_next_token = { + '(': ')', + '[': ']', + '{': '}'}.get(token_string) + if unwanted_next_token: + if ( + get_item(tokens, + index + 1, + default=[None, None])[1] == unwanted_next_token or + get_item(tokens, + index + 2, + default=[None, None])[1] == unwanted_next_token + ): + continue - if token_string == '(': - first_paren = False + if ( + index > 2 and token_string == '(' and + tokens[index - 1][1] in ',(%[' + ): + # Don't split after a tuple start, or before a tuple start if + # the tuple is in a list. + continue + + if end_offset < len(source) - 1: + # Don't split right before newline. + offsets.append(end_offset) + else: + # Break at adjacent strings. 
These were probably meant to be on + # separate lines in the first place. + previous_token = get_item(tokens, index - 1) + if ( + token_type == tokenize.STRING and + previous_token and previous_token[0] == tokenize.STRING + ): + offsets.append(start_offset) current_indent = None fixed = None for line in split_at_offsets(source, offsets): if fixed: - fixed += newline + current_indent + line + fixed += '\n' + current_indent + line for symbol in '([{': if line.endswith(symbol): @@ -1260,33 +2131,60 @@ def _shorten_line_at_tokens(tokens, source, indentation, indent_word, newline, assert fixed is not None - if check_syntax(normalize_multiline(fixed, newline=newline) + if check_syntax(normalize_multiline(fixed) if aggressive > 1 else fixed): return indentation + fixed else: return None -def normalize_multiline(line, newline): - """Remove multiline-related code that will cause syntax error. +def token_offsets(tokens): + """Yield tokens and offsets.""" + end_offset = 0 + previous_end_row = 0 + previous_end_column = 0 + for t in tokens: + token_type = t[0] + token_string = t[1] + (start_row, start_column) = t[2] + (end_row, end_column) = t[3] + + # Account for the whitespace between tokens. + end_offset += start_column + if previous_end_row == start_row: + end_offset -= previous_end_column + + # Record the start offset of the token. + start_offset = end_offset + + # Account for the length of the token itself. + end_offset += len(token_string) + + yield (token_type, + token_string, + start_offset, + end_offset) + + previous_end_row = end_row + previous_end_column = end_column + + +def normalize_multiline(line): + """Normalize multiline-related code that will cause syntax error. This is for purposes of checking syntax. 
""" - for quote in '\'"': - dict_pattern = r'^{q}[^{q}]*{q} *: *'.format(q=quote) - if re.match(dict_pattern, line): - if not line.strip().endswith('}'): - line += '}' - return '{' + line - if line.startswith('def ') and line.rstrip().endswith(':'): - # Do not allow ':' to be alone. That is invalid. - split_line = [item.strip() for item in line.split(newline)] - if ':' not in split_line and 'def' not in split_line: - return line[len('def'):].strip().rstrip(':') - - return line + return line + ' pass' + elif line.startswith('return '): + return 'def _(): ' + line + elif line.startswith('@'): + return line + 'def _(): pass' + elif line.startswith('class '): + return line + ' pass' + else: + return line def fix_whitespace(line, offset, replacement): @@ -1335,6 +2233,10 @@ def full_error_results(self): return checker.report.full_error_results() +def _remove_leading_and_normalize(line): + return line.lstrip().rstrip(CR + LF) + '\n' + + class Reindenter(object): """Reindents badly-indented code to uniformly use four-space indentation. @@ -1343,52 +2245,52 @@ class Reindenter(object): """ - def __init__(self, input_text, newline): - self.newline = newline - - # Raw file lines. - self.raw = input_text - self.after = None + def __init__(self, input_text): + sio = io.StringIO(input_text) + source_lines = sio.readlines() - self.string_content_line_numbers = multiline_string_lines( - ''.join(self.raw)) + self.string_content_line_numbers = multiline_string_lines(input_text) # File lines, rstripped & tab-expanded. Dummy at start is so # that we can use tokenize's 1-based line numbering easily. # Note that a line is all-blank iff it is a newline. self.lines = [] - for line_number, line in enumerate(self.raw, start=1): + for line_number, line in enumerate(source_lines, start=1): # Do not modify if inside a multiline string. if line_number in self.string_content_line_numbers: self.lines.append(line) else: # Only expand leading tabs. 
self.lines.append(_get_indentation(line).expandtabs() + - line.strip() + newline) + _remove_leading_and_normalize(line)) self.lines.insert(0, None) self.index = 1 # index into self.lines of next line + self.input_text = input_text - def run(self): + def run(self, indent_size=DEFAULT_INDENT_SIZE): """Fix indentation and return modified line numbers. Line numbers are indexed at 1. """ + if indent_size < 1: + return self.input_text + try: stats = _reindent_stats(tokenize.generate_tokens(self.getline)) except (SyntaxError, tokenize.TokenError): - return set() + return self.input_text # Remove trailing empty lines. lines = self.lines - while lines and lines[-1] == self.newline: + while lines and lines[-1] == '\n': lines.pop() # Sentinel. stats.append((len(lines), 0)) # Map count of leading spaces to # we want. have2want = {} # Program after transformation. - after = self.after = [] + after = [] # Copy over initial empty lines -- there's nothing to do until # we see a line with *something* on it. i = stats[0][0] @@ -1397,7 +2299,7 @@ def run(self): thisstmt, thislevel = stats[i] nextstmt = stats[i + 1][0] have = _leading_space_count(lines[thisstmt]) - want = thislevel * 4 + want = thislevel * indent_size if want < 0: # A comment line. 
if have: @@ -1411,7 +2313,7 @@ def run(self): jline, jlevel = stats[j] if jlevel >= 0: if have == _leading_space_count(lines[jline]): - want = jlevel * 4 + want = jlevel * indent_size break if want < 0: # Maybe it's a hanging # comment like this one, @@ -1440,7 +2342,7 @@ def run(self): if line_number in self.string_content_line_numbers: after.append(line) elif diff > 0: - if line == self.newline: + if line == '\n': after.append(line) else: after.append(' ' * diff + line) @@ -1448,14 +2350,7 @@ def run(self): remove = min(_leading_space_count(line), -diff) after.append(line[remove:]) - if self.raw == self.after: - return set() - else: - return (set(range(1, 1 + len(self.raw))) - - self.string_content_line_numbers) - - def fixed_lines(self): - return self.after + return ''.join(after) def getline(self): """Line-getter for tokenize.""" @@ -1543,54 +2438,6 @@ def refactor_with_2to3(source_text, fixer_names): return source_text -def break_multiline(source_text, newline, indent_word): - """Break first line of multiline code. - - Return None if a break is not possible. - - """ - indentation = _get_indentation(source_text) - - # Handle special case only. - for symbol in '([{': - # Only valid if symbol is not on a line by itself. - if ( - symbol in source_text and - source_text.strip() != symbol and - source_text.rstrip().endswith((',', '%')) - ): - index = 1 + source_text.find(symbol) - - if index <= len(indent_word) + len(indentation): - continue - - if is_probably_inside_string_or_comment(source_text, index - 1): - continue - - return ( - source_text[:index].rstrip() + newline + - indentation + indent_word + - source_text[index:].lstrip()) - - return None - - -def is_probably_inside_string_or_comment(line, index): - """Return True if index may be inside a string or comment.""" - # Make sure we are not in a string. - for quote in ['"', "'"]: - if quote in line: - if line.find(quote) <= index: - return True - - # Make sure we are not in a comment. 
- if '#' in line: - if line.find('#') <= index: - return True - - return False - - def check_syntax(code): """Return True if syntax is okay.""" try: @@ -1599,7 +2446,7 @@ def check_syntax(code): return False -def filter_results(source, results, aggressive=False): +def filter_results(source, results, aggressive, indent_size): """Filter out spurious reports from pep8. If aggressive is True, we allow possibly unsafe fixes (E711, E712). @@ -1612,8 +2459,6 @@ def filter_results(source, results, aggressive=False): commented_out_code_line_numbers = commented_out_code_lines(source) - split_source = [None] + source.splitlines() - for r in results: issue_id = r['id'].lower() @@ -1633,12 +2478,6 @@ def filter_results(source, results, aggressive=False): if issue_id.startswith(('w29', 'w39')): continue - # Filter out incorrect E101 reports when there are no tabs. - # pep8 will complain about this even if the tab indentation found - # elsewhere is in a multiline string. - if issue_id == 'e101' and '\t' not in split_source[r['line']]: - continue - if aggressive <= 0: if issue_id.startswith(('e711', 'w6')): continue @@ -1662,11 +2501,10 @@ def multiline_string_lines(source, include_docstrings=False): Docstrings are ignored. """ - sio = io.StringIO(source) line_numbers = set() previous_token_type = '' try: - for t in tokenize.generate_tokens(sio.readline): + for t in generate_tokens(source): token_type = t[0] start_row = t[2][0] end_row = t[3][0] @@ -1694,10 +2532,9 @@ def commented_out_code_lines(source): clutter. 
""" - sio = io.StringIO(source) line_numbers = [] try: - for t in tokenize.generate_tokens(sio.readline): + for t in generate_tokens(source): token_type = t[0] token_string = t[1] start_row = t[2][0] @@ -1721,8 +2558,14 @@ def commented_out_code_lines(source): return line_numbers -def shorten_comment(line, newline, max_line_length, last_comment=False): - """Return trimmed or split long comment line.""" +def shorten_comment(line, max_line_length, last_comment=False): + """Return trimmed or split long comment line. + + If there are no comments immediately following it, do a text wrap. + Doing this wrapping on all comments in general would lead to jagged + comment text. + + """ assert len(line) > max_line_length line = line.rstrip() @@ -1737,7 +2580,7 @@ def shorten_comment(line, newline, max_line_length, last_comment=False): not line[-1].isalnum() ): # Trim comments that end with things like --------- - return line[:max_line_length] + newline + return line[:max_line_length] + '\n' elif last_comment and re.match(r'\s*#+\s*\w+', line): import textwrap split_lines = textwrap.wrap(line.lstrip(' \t#'), @@ -1746,18 +2589,17 @@ def shorten_comment(line, newline, max_line_length, last_comment=False): width=max_line_length, break_long_words=False, break_on_hyphens=False) - return newline.join(split_lines) + newline + return '\n'.join(split_lines) + '\n' else: - return line + newline + return line + '\n' -def normalize_line_endings(lines): +def normalize_line_endings(lines, newline): """Return fixed line endings. All lines will be modified to use the most common line ending. 
""" - newline = find_newline(lines) return [line.rstrip('\n\r') + newline for line in lines] @@ -1767,11 +2609,13 @@ def mutual_startswith(a, b): def code_match(code, select, ignore): if ignore: + assert not isinstance(ignore, unicode) for ignored_code in [c.strip() for c in ignore]: if mutual_startswith(code.lower(), ignored_code.lower()): return False if select: + assert not isinstance(select, unicode) for selected_code in [c.strip() for c in select]: if mutual_startswith(code.lower(), selected_code.lower()): return True @@ -1783,7 +2627,7 @@ def code_match(code, select, ignore): def fix_code(source, options=None): """Return fixed source code.""" if not options: - options = parse_args([''])[0] + options = parse_args(['']) if not isinstance(source, unicode): source = source.decode(locale.getpreferredencoding(False)) @@ -1792,19 +2636,15 @@ def fix_code(source, options=None): return fix_lines(sio.readlines(), options=options) -def fix_string(source, options=None): - """Deprecated.""" - warnings.warn('fix_string() is deprecated; use fix_code() instead', - DeprecationWarning) - return fix_code(source, options) - - def fix_lines(source_lines, options, filename=''): """Return fixed source code.""" - tmp_source = ''.join(normalize_line_endings(source_lines)) + # Transform everything to line feed. Then change them back to original + # before returning fixed source code. + original_newline = find_newline(source_lines) + tmp_source = ''.join(normalize_line_endings(source_lines, '\n')) # Keep a history to break out of cycles. 
- previous_hashes = set([hash(tmp_source)]) + previous_hashes = set() if options.line_range: fixed_source = tmp_source @@ -1813,29 +2653,33 @@ def fix_lines(source_lines, options, filename=''): fixed_source = apply_global_fixes(tmp_source, options) passes = 0 - while True: + long_line_ignore_cache = set() + while hash(fixed_source) not in previous_hashes: if options.pep8_passes >= 0 and passes > options.pep8_passes: break passes += 1 + previous_hashes.add(hash(fixed_source)) + tmp_source = copy.copy(fixed_source) - fix = FixPEP8(filename, options, contents=tmp_source) - fixed_source = fix.fix() + fix = FixPEP8( + filename, + options, + contents=tmp_source, + long_line_ignore_cache=long_line_ignore_cache) - if hash(fixed_source) in previous_hashes: - break - else: - previous_hashes.add(hash(fixed_source)) + fixed_source = fix.fix() - return fixed_source + sio = io.StringIO(fixed_source) + return ''.join(normalize_line_endings(sio.readlines(), original_newline)) def fix_file(filename, options=None, output=None): if not options: - options = parse_args([filename])[0] + options = parse_args([filename]) - original_source = read_from_filename(filename, readlines=True) + original_source = readlines_from_file(filename) fixed_source = original_source @@ -1893,15 +2737,22 @@ def apply_global_fixes(source, options): FixPEP8, which are dependent on pep8). 
""" + if code_match('E101', select=options.select, ignore=options.ignore): + source = reindent(source, + indent_size=options.indent_size) + for (code, function) in global_fixes(): if code_match(code, select=options.select, ignore=options.ignore): if options.verbose: print('---> Applying global fix for {0}'.format(code.upper()), file=sys.stderr) source = function(source, - aggressive=options.aggressive, - select=options.select, - ignore=options.ignore) + aggressive=options.aggressive) + + source = fix_2to3(source, + aggressive=options.aggressive, + select=options.select, + ignore=options.ignore) return source @@ -1925,123 +2776,133 @@ def extract_code_from_function(function): def create_parser(): """Return command-line parser.""" - parser = optparse.OptionParser(usage='Usage: %prog [options] ' - '[filename [filename ...]]' - '\nUse filename \'-\' for stdin.', - version='%prog {0}'.format(__version__), - description=docstring_summary(__doc__), - prog='autopep8') - parser.add_option('-v', '--verbose', action='count', dest='verbose', - default=0, - help='print verbose messages; ' - 'multiple -v result in more verbose messages') - parser.add_option('-d', '--diff', action='store_true', dest='diff', - help='print the diff for the fixed source') - parser.add_option('-i', '--in-place', action='store_true', - help='make changes to files in place') - parser.add_option('-r', '--recursive', action='store_true', - help='run recursively over directories; ' - 'must be used with --in-place or --diff') - parser.add_option('-j', '--jobs', type=int, metavar='n', default=1, - help='number of parallel jobs; ' - 'match CPU count if value is less than 1') - parser.add_option('-p', '--pep8-passes', metavar='n', - default=-1, type=int, - help='maximum number of additional pep8 passes ' - '(default: infinite)') - parser.add_option('-a', '--aggressive', action='count', default=0, - help='enable non-whitespace changes; ' - 'multiple -a result in more aggressive changes') - 
parser.add_option('--exclude', metavar='globs', - help='exclude file/directory names that match these ' - 'comma-separated globs') - parser.add_option('--list-fixes', action='store_true', - help='list codes for fixes; ' - 'used by --ignore and --select') - parser.add_option('--ignore', metavar='errors', default='', - help='do not fix these errors/warnings ' - '(default: {0})'.format(DEFAULT_IGNORE)) - parser.add_option('--select', metavar='errors', default='', - help='fix only these errors/warnings (e.g. E4,W)') - parser.add_option('--max-line-length', metavar='n', default=79, type=int, - help='set maximum allowed line length ' - '(default: %default)') - parser.add_option('--range', metavar='start end', dest='line_range', - default=None, type=int, nargs=2, - help='only fix errors found within this inclusive ' - 'range of line numbers (e.g. 1 99); ' - 'line numbers are indexed at 1') + # Do import locally to be friendly to those who use autopep8 as a library + # and are supporting Python 2.6. 
+ import argparse + + parser = argparse.ArgumentParser(description=docstring_summary(__doc__), + prog='autopep8') + parser.add_argument('--version', action='version', + version='%(prog)s ' + __version__) + parser.add_argument('-v', '--verbose', action='count', dest='verbose', + default=0, + help='print verbose messages; ' + 'multiple -v result in more verbose messages') + parser.add_argument('-d', '--diff', action='store_true', dest='diff', + help='print the diff for the fixed source') + parser.add_argument('-i', '--in-place', action='store_true', + help='make changes to files in place') + parser.add_argument('-r', '--recursive', action='store_true', + help='run recursively over directories; ' + 'must be used with --in-place or --diff') + parser.add_argument('-j', '--jobs', type=int, metavar='n', default=1, + help='number of parallel jobs; ' + 'match CPU count if value is less than 1') + parser.add_argument('-p', '--pep8-passes', metavar='n', + default=-1, type=int, + help='maximum number of additional pep8 passes ' + '(default: infinite)') + parser.add_argument('-a', '--aggressive', action='count', default=0, + help='enable non-whitespace changes; ' + 'multiple -a result in more aggressive changes') + parser.add_argument('--experimental', action='store_true', + help='enable experimental fixes') + parser.add_argument('--exclude', metavar='globs', + help='exclude file/directory names that match these ' + 'comma-separated globs') + parser.add_argument('--list-fixes', action='store_true', + help='list codes for fixes; ' + 'used by --ignore and --select') + parser.add_argument('--ignore', metavar='errors', default='', + help='do not fix these errors/warnings ' + '(default: {0})'.format(DEFAULT_IGNORE)) + parser.add_argument('--select', metavar='errors', default='', + help='fix only these errors/warnings (e.g. 
E4,W)') + parser.add_argument('--max-line-length', metavar='n', default=79, type=int, + help='set maximum allowed line length ' + '(default: %(default)s)') + parser.add_argument('--range', metavar='line', dest='line_range', + default=None, type=int, nargs=2, + help='only fix errors found within this inclusive ' + 'range of line numbers (e.g. 1 99); ' + 'line numbers are indexed at 1') + parser.add_argument('--indent-size', default=DEFAULT_INDENT_SIZE, + type=int, metavar='n', + help='number of spaces per indent level ' + '(default %(default)s)') + parser.add_argument('files', nargs='*', + help="files to format or '-' for standard in") return parser -def parse_args(args): +def parse_args(arguments): """Parse command-line options.""" parser = create_parser() - options, args = parser.parse_args(args) + args = parser.parse_args(arguments) - if not len(args) and not options.list_fixes: + if not args.files and not args.list_fixes: parser.error('incorrect number of arguments') - args = [decode_filename(name) for name in args] + args.files = [decode_filename(name) for name in args.files] - if '-' in args: - if len(args) > 1: + if '-' in args.files: + if len(args.files) > 1: parser.error('cannot mix stdin and regular files') - if options.diff: + if args.diff: parser.error('--diff cannot be used with standard input') - if options.in_place: + if args.in_place: parser.error('--in-place cannot be used with standard input') - if options.recursive: + if args.recursive: parser.error('--recursive cannot be used with standard input') - if len(args) > 1 and not (options.in_place or options.diff): + if len(args.files) > 1 and not (args.in_place or args.diff): parser.error('autopep8 only takes one filename as argument ' - 'unless the "--in-place" or "--diff" options are ' + 'unless the "--in-place" or "--diff" args are ' 'used') - if options.recursive and not (options.in_place or options.diff): + if args.recursive and not (args.in_place or args.diff): parser.error('--recursive must be 
used with --in-place or --diff') - if options.exclude and not options.recursive: + if args.exclude and not args.recursive: parser.error('--exclude is only relevant when used with --recursive') - if options.in_place and options.diff: + if args.in_place and args.diff: parser.error('--in-place and --diff are mutually exclusive') - if options.max_line_length <= 0: + if args.max_line_length <= 0: parser.error('--max-line-length must be greater than 0') - if options.select: - options.select = options.select.split(',') + if args.select: + args.select = args.select.split(',') - if options.ignore: - options.ignore = options.ignore.split(',') - elif not options.select: - if options.aggressive: + if args.ignore: + args.ignore = args.ignore.split(',') + elif not args.select: + if args.aggressive: # Enable everything by default if aggressive. - options.select = ['E', 'W'] + args.select = ['E', 'W'] else: - options.ignore = DEFAULT_IGNORE.split(',') + args.ignore = DEFAULT_IGNORE.split(',') - if options.exclude: - options.exclude = options.exclude.split(',') + if args.exclude: + args.exclude = args.exclude.split(',') else: - options.exclude = [] + args.exclude = [] - if options.jobs < 1: + if args.jobs < 1: # Do not import multiprocessing globally in case it is not supported # on the platform. import multiprocessing - options.jobs = multiprocessing.cpu_count() + args.jobs = multiprocessing.cpu_count() - if options.jobs > 1 and not options.in_place: + if args.jobs > 1 and not args.in_place: parser.error('parallel jobs requires --in-place') - return options, args + return args def decode_filename(filename): @@ -2059,6 +2920,8 @@ def supported_fixes(): description. 
""" + yield ('E101', docstring_summary(reindent.__doc__)) + instance = FixPEP8(filename=None, options=None, contents='') for attribute in dir(instance): code = re.match('fix_([ew][0-9][0-9][0-9])', attribute) @@ -2073,81 +2936,121 @@ def supported_fixes(): yield (code.upper() + (4 - len(code)) * ' ', re.sub(r'\s+', ' ', docstring_summary(function.__doc__))) + for code in sorted(CODE_TO_2TO3): + yield (code.upper() + (4 - len(code)) * ' ', + re.sub(r'\s+', ' ', docstring_summary(fix_2to3.__doc__))) + def docstring_summary(docstring): """Return summary of docstring.""" return docstring.split('\n')[0] -def line_shortening_rank(candidate, newline, indent_word, max_line_length): +def line_shortening_rank(candidate, indent_word, max_line_length): """Return rank of candidate. This is for sorting candidates. """ + if not candidate.strip(): + return 0 + rank = 0 - if candidate.strip(): - lines = candidate.split(newline) + lines = candidate.split('\n') + + offset = 0 + if ( + not lines[0].lstrip().startswith('#') and + lines[0].rstrip()[-1] not in '([{' + ): + for symbol in '([{': + offset = max(offset, 1 + lines[0].find(symbol)) + + current_longest = max(offset + len(x.strip()) for x in lines) + + rank += 2 * max(0, current_longest - max_line_length) + + rank += len(lines) - offset = 0 + # Too much variation in line length is ugly. 
+ rank += 2 * standard_deviation(len(line) for line in lines) + + bad_staring_symbol = { + '(': ')', + '[': ']', + '{': '}'}.get(lines[0][-1]) + + if len(lines) > 1: if ( - not lines[0].lstrip().startswith('#') and - lines[0].rstrip()[-1] not in '([{' + bad_staring_symbol and + lines[1].lstrip().startswith(bad_staring_symbol) ): - for symbol in '([{': - offset = max(offset, 1 + lines[0].find(symbol)) + rank += 20 + + for lineno, current_line in enumerate(lines): + current_line = current_line.strip() - current_longest = max(offset + len(x.strip()) for x in lines) + if current_line.startswith('#'): + continue - rank += max(0, current_longest - max_line_length) + for bad_start in ['.', '%', '+', '-', '/']: + if current_line.startswith(bad_start): + rank += 100 - rank += len(lines) + # Do not tolerate operators on their own line. + if current_line == bad_start: + rank += 1000 - # Too much variation in line length is ugly. - rank += 2 * standard_deviation(len(line) for line in lines) + if current_line.endswith(('(', '[', '{')): + # Avoid lonely opening. They result in longer lines. + if len(current_line) <= len(indent_word): + rank += 100 - bad_staring_symbol = { - '(': ')', - '[': ']', - '{': '}'}.get(lines[0][-1], None) + # Avoid ugliness of ", (\n". + if current_line.endswith(','): + rank += 100 - if len(lines) > 1: - if ( - bad_staring_symbol and - lines[1].lstrip().startswith(bad_staring_symbol) - ): - rank += 20 + if has_arithmetic_operator(current_line): + rank += 100 - for current_line in lines: - if current_line.lstrip().startswith('#'): - continue + if current_line.endswith(('%', '(', '[', '{')): + rank -= 20 - for bad_start in ['.', '%', '+', '-', '/']: - if current_line.startswith(bad_start): - rank += 100 + # Try to break list comprehensions at the "for". + if current_line.startswith('for '): + rank -= 50 - if current_line.endswith(('(', '[', '{')): - # Avoid lonely opening. They result in longer lines. 
- if len(current_line.strip()) <= len(indent_word): - rank += 100 + if current_line.endswith('\\'): + # If a line ends in \-newline, it may be part of a + # multiline string. In that case, we would like to know + # how long that line is without the \-newline. If it's + # longer than the maximum, or has comments, then we assume + # that the \-newline is an okay candidate and only + # penalize it a bit. + total_len = len(current_line) + lineno += 1 + while lineno < len(lines): + total_len += len(lines[lineno]) - # Avoid ugliness of ", (\n". - if current_line[:-1].rstrip().endswith(','): - rank += 100 + if lines[lineno].lstrip().startswith('#'): + total_len = max_line_length + break - if has_arithmetic_operator(current_line): - rank += 100 + if not lines[lineno].endswith('\\'): + break - if current_line.endswith('%'): - rank -= 20 + lineno += 1 - # Try to break list comprehensions at the "for". - if current_line.lstrip().startswith('for'): - rank -= 50 + if total_len < max_line_length: + rank += 10 + else: + rank += 1 - rank += 10 * count_unbalanced_brackets(current_line) - else: - rank = 100000 + # Prefer breaking at commas rather than colon. + if ',' in current_line and current_line.endswith(':'): + rank += 10 + + rank += 10 * count_unbalanced_brackets(current_line) return max(0, rank) @@ -2192,7 +3095,7 @@ def split_at_offsets(line, offsets): current_offset = 0 for current_offset in sorted(offsets): if current_offset < len(line) and previous_offset != current_offset: - result.append(line[previous_offset:current_offset]) + result.append(line[previous_offset:current_offset].strip()) previous_offset = current_offset result.append(line[current_offset:]) @@ -2296,6 +3199,20 @@ def is_python_file(filename): return True +def is_probably_part_of_multiline(line): + """Return True if line is likely part of a multiline string. + + When multiline strings are involved, pep8 reports the error as being + at the start of the multiline string, which doesn't work for us. 
+ + """ + return ( + '"""' in line or + "'''" in line or + line.rstrip().endswith('\\') + ) + + def main(): """Tool main.""" try: @@ -2306,33 +3223,57 @@ def main(): pass try: - options, args = parse_args(sys.argv[1:]) + args = parse_args(sys.argv[1:]) - if options.list_fixes: - for code, description in supported_fixes(): + if args.list_fixes: + for code, description in sorted(supported_fixes()): print('{code} - {description}'.format( code=code, description=description)) return 0 - if args == ['-']: - assert not options.in_place + if args.files == ['-']: + assert not args.in_place # LineEndingWrapper is unnecessary here due to the symmetry between # standard in and standard out. - sys.stdout.write(fix_code(sys.stdin.read(), options)) + sys.stdout.write(fix_code(sys.stdin.read(), args)) else: - if options.in_place or options.diff: - filenames = list(set(args)) + if args.in_place or args.diff: + args.files = list(set(args.files)) else: - assert len(args) == 1 - assert not options.recursive + assert len(args.files) == 1 + assert not args.recursive - filenames = args[:1] - - fix_multiple_files(filenames, options, sys.stdout) + fix_multiple_files(args.files, args, sys.stdout) except KeyboardInterrupt: return 1 # pragma: no cover +class CachedTokenizer(object): + + """A one-element cache around tokenize.generate_tokens(). + + Original code written by Ned Batchelder, in coverage.py. 
+ + """ + + def __init__(self): + self.last_text = None + self.last_tokens = None + + def generate_tokens(self, text): + """A stand-in for tokenize.generate_tokens().""" + if text != self.last_text: + string_io = io.StringIO(text) + self.last_tokens = list( + tokenize.generate_tokens(string_io.readline) + ) + self.last_text = text + return self.last_tokens + +_cached_tokenizer = CachedTokenizer() +generate_tokens = _cached_tokenizer.generate_tokens + + if __name__ == '__main__': sys.exit(main()) From fa160971ba62cf26ee0e38e4bbd874119f7a565a Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 14 Mar 2014 22:06:59 +0700 Subject: [PATCH 080/428] Fix run --- pymode/run.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pymode/run.py b/pymode/run.py index 82d30b57..f7ddddaf 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -73,4 +73,6 @@ def __prepare_lines(line1, line2): indent = len(line) - len(line.lstrip()) break + if len(lines) == 1: + lines.append('') return [l[indent:] for l in lines] From c8074ce46e581d51eb4740349fe5dd986b628276 Mon Sep 17 00:00:00 2001 From: Wouter Overmeire Date: Tue, 18 Mar 2014 14:18:51 +0100 Subject: [PATCH 081/428] doc update troubleshooting func name ```fun! pymode#troubleshooting#Test()``` has been renamed to ```fun! pymode#troubleshooting#test()``` in 16c0a719 --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index f37e45d2..23c2710d 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -623,7 +623,7 @@ Highlight docstrings as pythonDocstring (otherwise as pythonString) Python-mode doesn't work ------------------------ -Open any python file and run ":call pymode#troubleshooting#Test()", +Open any python file and run ":call pymode#troubleshooting#test()", fix the warning or send me the output. 
From 20e14aa352231d4a9fe497cb0adea9cdb57b6c20 Mon Sep 17 00:00:00 2001 From: Wouter Overmeire Date: Tue, 18 Mar 2014 16:29:26 +0100 Subject: [PATCH 082/428] fix #385 (syntax highlighting failure) Syntax highlight failure resolved by skipping everything between ```'``` or ```"```. Before this fix the code below had incorrect syntax highlighting. ![image](http://i.imgur.com/017h22W.png) --- syntax/python.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syntax/python.vim b/syntax/python.vim index e0416589..4c218c04 100644 --- a/syntax/python.vim +++ b/syntax/python.vim @@ -78,7 +78,7 @@ endif syn keyword pythonStatement def nextgroup=pythonFunction skipwhite syn match pythonFunction "\%(\%(def\s\|@\)\s*\)\@<=\h\%(\w\|\.\)*" contained nextgroup=pythonVars - syn region pythonVars start="(" end=")" contained contains=pythonParameters transparent keepend + syn region pythonVars start="(" skip=+\(".*"\|'.*'\)+ end=")" contained contains=pythonParameters transparent keepend syn match pythonParameters "[^,]*" contained contains=pythonParam skipwhite syn match pythonParam "[^,]*" contained contains=pythonExtraOperator,pythonLambdaExpr,pythonBuiltinObj,pythonBuiltinType,pythonConstant,pythonString,pythonNumber,pythonBrackets,pythonSelf skipwhite syn match pythonBrackets "{[(|)]}" contained skipwhite From f0e6b4031bc66bf72a362e5d68b6d7e661a146cb Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 24 Mar 2014 17:50:49 +0700 Subject: [PATCH 083/428] Update pylama to version 3.0.1 --- autoload/pymode/tools/signs.vim | 1 + plugin/pymode.vim | 1 + pymode/libs/pylama/__init__.py | 2 +- pymode/libs/pylama/config.py | 258 ++-- pymode/libs/pylama/core.py | 43 +- pymode/libs/pylama/hook.py | 3 - .../pylama/lint/pylama_pep257/__init__.py | 9 +- .../libs/pylama/lint/pylama_pep257/pep257.py | 1207 +++++++++-------- .../libs/pylama/lint/pylama_pep8/__init__.py | 2 +- pymode/libs/pylama/lint/pylama_pep8/pep8.py | 117 +- .../pylama/lint/pylama_pyflakes/__init__.py | 34 
+- .../lint/pylama_pyflakes/pyflakes/__init__.py | 2 +- .../lint/pylama_pyflakes/pyflakes/checker.py | 146 +- .../lint/pylama_pyflakes/pyflakes/messages.py | 33 +- .../pylama/lint/pylama_pylint/__init__.py | 2 +- .../lint/pylama_pylint/astroid/__init__.py | 20 +- .../lint/pylama_pylint/astroid/as_string.py | 14 +- .../lint/pylama_pylint/astroid/brain/py2gi.py | 147 ++ .../astroid/brain/py2mechanize.py | 20 + .../pylama_pylint/astroid/brain/py2qt4.py | 25 + .../pylama_pylint/astroid/brain/py2stdlib.py | 227 ++++ .../lint/pylama_pylint/astroid/builder.py | 1 + .../lint/pylama_pylint/astroid/protocols.py | 2 + .../pylama_pylint/astroid/raw_building.py | 15 +- .../lint/pylama_pylint/astroid/rebuilder.py | 56 +- .../pylama_pylint/astroid/scoped_nodes.py | 22 + .../logilab/common/__pkginfo__.py | 2 +- .../logilab/common/deprecation.py | 6 +- .../pylama_pylint/logilab/common/graph.py | 4 +- .../pylama_pylint/logilab/common/modutils.py | 21 +- pymode/libs/pylama/lint/pylama_pylint/main.py | 8 +- .../lint/pylama_pylint/pylint/__init__.py | 2 +- .../lint/pylama_pylint/pylint/__pkginfo__.py | 6 +- .../pylama_pylint/pylint/checkers/__init__.py | 6 +- .../pylama_pylint/pylint/checkers/base.py | 164 ++- .../pylama_pylint/pylint/checkers/classes.py | 10 +- .../pylint/checkers/design_analysis.py | 51 +- .../pylint/checkers/exceptions.py | 69 +- .../pylama_pylint/pylint/checkers/format.py | 14 +- .../pylama_pylint/pylint/checkers/imports.py | 13 +- .../pylama_pylint/pylint/checkers/logging.py | 36 +- .../pylama_pylint/pylint/checkers/misc.py | 2 +- .../pylama_pylint/pylint/checkers/newstyle.py | 7 +- .../pylint/checkers/raw_metrics.py | 6 +- .../pylama_pylint/pylint/checkers/similar.py | 20 +- .../pylama_pylint/pylint/checkers/stdlib.py | 4 +- .../pylama_pylint/pylint/checkers/strings.py | 8 +- .../pylint/checkers/typecheck.py | 6 +- .../pylama_pylint/pylint/checkers/utils.py | 67 +- .../pylint/checkers/variables.py | 88 +- .../lint/pylama_pylint/pylint/config.py | 19 +- 
.../lint/pylama_pylint/pylint/interfaces.py | 2 +- .../pylama/lint/pylama_pylint/pylint/lint.py | 106 +- .../pylint/reporters/__init__.py | 2 +- .../pylama_pylint/pylint/reporters/html.py | 2 +- .../pylama_pylint/pylint/reporters/text.py | 6 +- .../pylama/lint/pylama_pylint/pylint/utils.py | 20 +- pymode/libs/pylama/tasks.py | 3 +- 58 files changed, 1995 insertions(+), 1194 deletions(-) create mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py create mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2mechanize.py create mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2qt4.py create mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2stdlib.py diff --git a/autoload/pymode/tools/signs.vim b/autoload/pymode/tools/signs.vim index 54e46643..3487cf85 100644 --- a/autoload/pymode/tools/signs.vim +++ b/autoload/pymode/tools/signs.vim @@ -14,6 +14,7 @@ endfunction "}}} fun! g:PymodeSigns.setup() "{{{ if self.enabled() execute 'sign define PymodeW text=' . g:pymode_lint_todo_symbol . " texthl=Todo" + execute 'sign define PymodeD text=' . g:pymode_lint_docs_symbol . " texthl=String" execute 'sign define PymodeC text=' . g:pymode_lint_comment_symbol . " texthl=Comment" execute 'sign define PymodeR text=' . g:pymode_lint_visual_symbol . " texthl=Visual" execute 'sign define PymodeE text=' . g:pymode_lint_error_symbol . 
" texthl=Error" diff --git a/plugin/pymode.vim b/plugin/pymode.vim index e84cd651..9e08f33d 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -120,6 +120,7 @@ call pymode#default("g:pymode_lint_signs", 1) " Symbol's definitions call pymode#default("g:pymode_lint_todo_symbol", "WW") +call pymode#default("g:pymode_lint_docs_symbol", "DD") call pymode#default("g:pymode_lint_comment_symbol", "CC") call pymode#default("g:pymode_lint_visual_symbol", "RR") call pymode#default("g:pymode_lint_error_symbol", "EE") diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index 71ae8c59..d030cbc4 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -version_info = 2, 0, 4 +version_info = 3, 0, 0 __version__ = version = '.'.join(map(str, version_info)) __project__ = __name__ diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index c7c4eb15..04ac4bb6 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -1,18 +1,23 @@ """ Parse arguments from command line and configuration files. """ - import fnmatch +import sys from os import getcwd, path from re import compile as re import logging -from argparse import ArgumentParser, Namespace as Options +from argparse import ArgumentParser from . 
import version -from .core import LOGGER, STREAM from .libs.inirama import Namespace from .lint.extensions import LINTERS +# Setup a logger +LOGGER = logging.getLogger('pylama') +LOGGER.propagate = False +STREAM = logging.StreamHandler(sys.stdout) +LOGGER.addHandler(STREAM) + #: A default checkers DEFAULT_LINTERS = 'pep8', 'pyflakes', 'mccabe' @@ -20,58 +25,136 @@ DEFAULT_INI_PATH = path.join(CURDIR, 'pylama.ini') -def parse_options( - args=None, async=False, select='', ignore='', linters=DEFAULT_LINTERS, - options=DEFAULT_INI_PATH): +class _Default(object): + + def __init__(self, value=None): + self.value = value + + def __str__(self): + return str(self.value) + + __repr__ = lambda s: "<_Default [%s]>" % s.value + + +def split_csp_str(s): + """ Split commaseparated string. + + :returns: list of splitted values + + """ + if isinstance(s, (list, tuple)): + return s + return list(set(i for i in s.strip().split(',') if i)) + + +def parse_linters(linters): + """ Initialize choosen linters. + + :returns: list of inited linters + + """ + result = list() + for name in split_csp_str(linters): + linter = LINTERS.get(name) + if linter: + result.append((name, linter)) + else: + logging.warn("Linter `%s` not found.", name) + return result + + +PARSER = ArgumentParser(description="Code audit tool for python.") +PARSER.add_argument( + "path", nargs='?', default=_Default(CURDIR), + help="Path on file or directory.") + +PARSER.add_argument( + "--verbose", "-v", action='store_true', help="Verbose mode.") + +PARSER.add_argument('--version', action='version', + version='%(prog)s ' + version) + +PARSER.add_argument( + "--format", "-f", default=_Default('pep8'), choices=['pep8', 'pylint'], + help="Error format.") + +PARSER.add_argument( + "--select", "-s", default=_Default(''), type=split_csp_str, + help="Select errors and warnings. 
(comma-separated)") + + +PARSER.add_argument( + "--linters", "-l", default=_Default(','.join(DEFAULT_LINTERS)), + type=parse_linters, help=( + "Select linters. (comma-separated). Choices are %s." + % ','.join(s for s in LINTERS.keys()) + )) + +PARSER.add_argument( + "--ignore", "-i", default=_Default(''), type=split_csp_str, + help="Ignore errors and warnings. (comma-separated)") + +PARSER.add_argument( + "--skip", default=_Default(''), + type=lambda s: [re(fnmatch.translate(p)) for p in s.split(',') if p], + help="Skip files by masks (comma-separated, Ex. */messages.py)") + +PARSER.add_argument("--report", "-r", help="Filename for report.") +PARSER.add_argument( + "--hook", action="store_true", help="Install Git (Mercurial) hook.") + +PARSER.add_argument( + "--async", action="store_true", + help="Enable async mode. Usefull for checking a lot of files. " + "Dont supported with pylint.") + +PARSER.add_argument( + "--options", "-o", default=_Default(DEFAULT_INI_PATH), + help="Select configuration file. By default is '/pylama.ini'") + + +ACTIONS = dict((a.dest, a) for a in PARSER._actions) + + +def parse_options(args=None, **overrides): # noqa """ Parse options from command line and configuration files. 
:return argparse.Namespace: """ - # Parse args from command string - parser = get_parser() - actions = dict((a.dest, a) for a in parser._actions) - options = Options( - async=_Default(async), format=_Default('pep8'), - select=_Default(select), ignore=_Default(ignore), - report=_Default(None), verbose=_Default(False), - linters=_Default(','.join(linters)), options=_Default(options)) + if args is None: + args = [] - if not args is None: - options = parser.parse_args(args) + # Parse args from command string + options = PARSER.parse_args(args) # Parse options from ini file - config = get_config(str(options.options)) + cfg = get_config(str(options.options)) # Compile options from ini - for k, v in config.default.items(): - value = getattr(options, k, _Default(None)) - if not isinstance(value, _Default): - continue - - action = actions.get(k) + for k, v in cfg.default.items(): LOGGER.info('Find option %s (%s)', k, v) - name, value = action.dest, action.type(v)\ - if callable(action.type) else v - if action.const: - value = bool(int(value)) - setattr(options, name, value) + passed_value = getattr(options, k, _Default()) + if isinstance(passed_value, _Default): + setattr(options, k, _Default(v)) + + # Override options + for k, v in overrides.items(): + passed_value = getattr(options, k, _Default()) + if isinstance(passed_value, _Default): + setattr(options, k, _Default(v)) # Postprocess options opts = dict(options.__dict__.items()) for name, value in opts.items(): if isinstance(value, _Default): - action = actions.get(name) - if action and callable(action.type): - value.value = action.type(value.value) - - setattr(options, name, value.value) + setattr(options, name, process_value(name, value.value)) # Parse file related options options.file_params = dict() options.linter_params = dict() - for k, s in config.sections.items(): - if k == config.default_section: + for k, s in cfg.sections.items(): + if k == cfg.default_section: continue if k in LINTERS: 
options.linter_params[k] = dict(s) @@ -85,84 +168,19 @@ def parse_options( return options -def setup_logger(options): - """ Setup logger with options. """ - - LOGGER.setLevel(logging.INFO if options.verbose else logging.WARN) - if options.report: - LOGGER.removeHandler(STREAM) - LOGGER.addHandler(logging.FileHandler(options.report, mode='w')) - LOGGER.info('Try to read configuration from: ' + options.options) - +def process_value(name, value): + """ Compile option value. """ + action = ACTIONS.get(name) + if not action: + return value -def get_parser(): - """ Make command parser for pylama. + if callable(action.type): + return action.type(value) - :return ArgumentParser: + if action.const: + return bool(int(value)) - """ - split_csp_str = lambda s: list( - set(i for i in s.strip().split(',') if i)) - - parser = ArgumentParser(description="Code audit tool for python.") - parser.add_argument( - "path", nargs='?', default=_Default(CURDIR), - help="Path on file or directory.") - - parser.add_argument( - "--verbose", "-v", action='store_true', help="Verbose mode.") - - parser.add_argument('--version', action='version', - version='%(prog)s ' + version) - - parser.add_argument( - "--format", "-f", default=_Default('pep8'), choices=['pep8', 'pylint'], - help="Error format.") - - parser.add_argument( - "--select", "-s", default=_Default(''), type=split_csp_str, - help="Select errors and warnings. (comma-separated)") - - def parse_linters(csp_str): - result = list() - for name in split_csp_str(csp_str): - linter = LINTERS.get(name) - if linter: - result.append((name, linter)) - else: - logging.warn("Linter `%s` not found.", name) - return result - - parser.add_argument( - "--linters", "-l", default=_Default(','.join(DEFAULT_LINTERS)), - type=parse_linters, help=( - "Select linters. (comma-separated). Choices are %s." 
- % ','.join(s for s in LINTERS.keys()) - )) - - parser.add_argument( - "--ignore", "-i", default=_Default(''), type=split_csp_str, - help="Ignore errors and warnings. (comma-separated)") - - parser.add_argument( - "--skip", default=_Default(''), - type=lambda s: [re(fnmatch.translate(p)) for p in s.split(',') if p], - help="Skip files by masks (comma-separated, Ex. */messages.py)") - - parser.add_argument("--report", "-r", help="Filename for report.") - parser.add_argument( - "--hook", action="store_true", help="Install Git (Mercurial) hook.") - - parser.add_argument( - "--async", action="store_true", - help="Enable async mode. Usefull for checking a lot of files. " - "Dont supported with pylint.") - - parser.add_argument( - "--options", "-o", default=_Default(DEFAULT_INI_PATH), - help="Select configuration file. By default is '/pylama.ini'") - - return parser + return value def get_config(ini_path=DEFAULT_INI_PATH): @@ -178,16 +196,10 @@ def get_config(ini_path=DEFAULT_INI_PATH): return config -class _Default(object): - - def __init__(self, value): - self.value = value - - def __getattr__(self, name): - return getattr(self.value, name) - - def __str__(self): - return str(self.value) - - -# lint_ignore=R0914,W0212,E1103,C901 +def setup_logger(options): + """ Setup logger with options. """ + LOGGER.setLevel(logging.INFO if options.verbose else logging.WARN) + if options.report: + LOGGER.removeHandler(STREAM) + LOGGER.addHandler(logging.FileHandler(options.report, mode='w')) + LOGGER.info('Try to read configuration from: ' + options.options) diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 7c5c8c4e..222c5f77 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -3,10 +3,11 @@ Prepare params, check a modeline and run the checkers. 
""" -import logging import re -import sys +import logging + +from .config import process_value, LOGGER from .lint.extensions import LINTERS @@ -15,13 +16,8 @@ # Parse a modelines MODELINE_RE = re.compile( - r'^\s*#\s+(?:pymode\:)?((?:lint[\w_]*=[^:\n\s]+:?)+)', re.I | re.M) - -# Setup a logger -LOGGER = logging.getLogger('pylama') -LOGGER.propagate = False -STREAM = logging.StreamHandler(sys.stdout) -LOGGER.addHandler(STREAM) + r'^\s*#\s+(?:pylama:)\s*((?:[\w_]*=[^:\n\s]+:?)+)', + re.I | re.M) def run(path, code=None, options=None): @@ -32,19 +28,17 @@ def run(path, code=None, options=None): """ errors = [] params = dict(ignore=options.ignore, select=options.select) - config = dict() + fileconfig = dict() for mask in options.file_params: if mask.match(path): - config.update(options.file_params[mask]) + fileconfig.update(options.file_params[mask]) try: with CodeContext(code, path) as ctx: code = ctx.code - params = prepare_params( - parse_modeline(code), config, ignore=options.ignore, - select=options.select) + params = prepare_params(parse_modeline(code), fileconfig, options) - if not params['lint']: + if params.get('skip'): return errors for item in options.linters: @@ -108,26 +102,22 @@ def parse_modeline(code): return dict() -def prepare_params(*configs, **params): +def prepare_params(modeline, fileconfig, options): """ Prepare and merge a params from modelines and configs. 
:return dict: """ - params['ignore'] = list(params.get('ignore') or []) - params['select'] = list(params.get('select') or []) + params = dict(ignore=options.ignore, select=options.select, skip=False) - for config in filter(None, configs): + for config in filter(None, [modeline, fileconfig]): for key in ('ignore', 'select'): - config.setdefault(key, config.get('lint_' + key, [])) - if not isinstance(config[key], list): - config[key] = config[key].split(',') - params[key] += config[key] - params['lint'] = config.get('lint', 1) + params[key] += process_value(key, config.get(key, [])) + params['skip'] = bool(int(config.get('skip', False))) params['ignore'] = set(params['ignore']) params['select'] = set(params['select']) - params.setdefault('lint', 1) + return params @@ -176,17 +166,20 @@ class CodeContext(object): """ Read file if code is None. """ def __init__(self, code, path): + """ Init context. """ self.code = code self.path = path self._file = None def __enter__(self): + """ Open file and read a code. """ if self.code is None: self._file = open(self.path, 'rU') self.code = self._file.read() return self def __exit__(self, t, value, traceback): + """ Close opened file. """ if not self._file is None: self._file.close() diff --git a/pymode/libs/pylama/hook.py b/pymode/libs/pylama/hook.py index a3cac2ec..cd1961ec 100644 --- a/pymode/libs/pylama/hook.py +++ b/pymode/libs/pylama/hook.py @@ -40,7 +40,6 @@ def git_hook(): def hg_hook(ui, repo, node=None, **kwargs): """ Run pylama after mercurial commit. """ - from .main import check_files seen = set() paths = [] @@ -74,7 +73,6 @@ def install_git(path): def install_hg(path): """ Install hook in Mercurial repository. """ - hook = op.join(path, 'hgrc') if not op.isfile(hook): open(hook, 'w+').close() @@ -95,7 +93,6 @@ def install_hg(path): def install_hook(path): """ Auto definition of SCM and hook installation. 
""" - git = op.join(path, '.git', 'hooks') hg = op.join(path, '.hg') if op.exists(git): diff --git a/pymode/libs/pylama/lint/pylama_pep257/__init__.py b/pymode/libs/pylama/lint/pylama_pep257/__init__.py index 1deef476..99474666 100644 --- a/pymode/libs/pylama/lint/pylama_pep257/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pep257/__init__.py @@ -14,14 +14,13 @@ def run(path, code=None, **meta): :return list: List of errors. """ - from .pep257 import check_source + from .pep257 import PEP257Checker errors = [] - for er in check_source(code, path): + for er in PEP257Checker().check_source(code, path): errors.append(dict( lnum=er.line, - col=er.char, - text='C0110 %s' % er.explanation.split('\n')[0].strip(), - type='W', + text=er.message, + type='D', )) return errors diff --git a/pymode/libs/pylama/lint/pylama_pep257/pep257.py b/pymode/libs/pylama/lint/pylama_pep257/pep257.py index a43dbae7..69a8fea2 100644 --- a/pymode/libs/pylama/lint/pylama_pep257/pep257.py +++ b/pymode/libs/pylama/lint/pylama_pep257/pep257.py @@ -1,699 +1,722 @@ #! /usr/bin/env python """Static analysis tool for checking docstring conventions and style. -About ------ - -Currently implemented checks cover most of PEP257: +Implemented checks cover PEP257: http://www.python.org/dev/peps/pep-0257/ -After PEP257 is covered and tested, other checks might be added, -e.g. NumPy docstring conventions is the first candidate: +Other checks can be added, e.g. NumPy docstring conventions: https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt -The main repository of this program is located at: +The repository is located at: http://github.com/GreenSteam/pep257 -Creating own checks -------------------- - -In order to add your own check, create a function in "Checks functions" -section below. The function should take 3 parameters: - -docstring : str - Docstring to check, as it is in file (with quotes). -context : str - Docstring's context (e.g. function's source code). 
-is_script : bool - Whether the docstring is script with #! or not. - -Depending on 1st parameter name, the function will be called with -different type of docstring: - - * module_docstring - * function_docstring - * class_docstring - * method_docstring - * def_docstring (i.e. function-docstrings + method-docstrings) - * docstring (i.e. all above docstring types) - -E.g. the following function will be fed only class-docstrings: - - def your_check(class_docstring, context, is_script): - pass - -If for a certain function, class, etc. a docstring does not exist, -then `None` will be passed, which should be taken into account. - -To signify that a check passed successfully simply `return` from the -check function. If a check failed, return `True`. If a check failed -and you can provide the precise position where it failed, return a -tuple (start_position, end_position), where start and end positions -are integers specifying where in `context` the failure occured. - -Also, see examples in "Check functions" section. 
- """ -__version__ = '0.2.4' +from __future__ import with_statement -from curses.ascii import isascii -import inspect -from optparse import OptionParser -from os import walk -from os.path import abspath, basename, expanduser, isdir, isfile -from os.path import join as path_join -import re +import os import sys import tokenize as tk +from itertools import takewhile, dropwhile, chain +from optparse import OptionParser +from re import compile as re try: from StringIO import StringIO -except ImportError: - # Python 3.0 and later +except ImportError: # Python 3.0 and later from io import StringIO -try: - all - any -except NameError: - # Python 2.4 and earlier - def all(iterable): - for element in iterable: - if not element: - return False - return True - - def any(iterable): - for element in iterable: - if element: - return True - return False - - try: next -except NameError: - # Python 2.5 and earlier - def next(obj): - return obj.next() - - -# -# Helper functions -# - -def cached(f): - """A decorator that caches function results. - - No cache expiration is currently done. 
- - """ - cache = {} - - def cached_func(*args, **kwargs): - key = (args, tuple(kwargs.items())) - if key in cache: - return cache[key] - else: - res = f(*args, **kwargs) - cache[key] = res - return res - return cached_func - - -def yield_list(f): - """Convert generator into list-returning function (decorator).""" - return lambda *arg, **kw: list(f(*arg, **kw)) +except NameError: # Python 2.5 and earlier + nothing = object() - -def remove_comments(s): - return re.sub('#[^\n]', '', s) - - -def abs_pos(marker, source): - """Return absolute position in source given (line, character) marker.""" - line, char = marker - lines = StringIO(source).readlines() - return len(''.join(lines[:line - 1])) + char - - -def rel_pos(abs_pos, source): - """Given absolute position, return relative (line, character) in source.""" - lines = StringIO(source).readlines() - nchars = len(source) - assert nchars >= abs_pos - while nchars > abs_pos: - assert nchars >= abs_pos - nchars -= len(lines[-1]) - lines.pop() - return len(lines) + 1, abs_pos - len(''.join(lines)) - - -def get_summary_line_info(thedocstring): - """Get the (summary_line, line_number) tuple for the given docstring. - - The returned 'summary_line' is the pep257 summary line and 'line_number' is - the zero-based docstring line number containing the summary line, which - will be either 0 (zeroth line) or 1 (first line). Any docstring checks - relating to the summary line should use this method to ensure consistent - treatment of the summary line. 
- - """ - lines = eval(thedocstring).split('\n') - first_line = lines[0].strip() - if len(lines) == 1 or len(first_line) > 0: - return first_line, 0 - return lines[1].strip(), 1 - - -# -# Parsing -# - - -def parse_module_docstring(source): - for kind, value, _, _, _ in tk.generate_tokens(StringIO(source).readline): - if kind in [tk.COMMENT, tk.NEWLINE, tk.NL]: - continue - elif kind == tk.STRING: # first STRING should be docstring - return value + def next(obj, default=nothing): + if default == nothing: + return obj.next() else: - return None + try: + return obj.next() + except StopIteration: + return default -def parse_docstring(source, what=''): - """Parse docstring given `def` or `class` source.""" - module_docstring = parse_module_docstring(source) - if what.startswith('module'): - return module_docstring - if module_docstring: - return module_docstring - token_gen = tk.generate_tokens(StringIO(source).readline) - try: - kind = None - while kind != tk.INDENT: - kind, _, _, _, _ = next(token_gen) - kind, value, _, _, _ = next(token_gen) - if kind == tk.STRING: # STRING after INDENT is a docstring - return value - except StopIteration: - pass +__version__ = '0.3.3-alpha' +__all__ = ('check', 'collect') -@yield_list -def parse_top_level(source, keyword): - """Parse top-level functions or classes.""" - token_gen = tk.generate_tokens(StringIO(source).readline) - kind, value, char = None, None, None - while True: - start, end = None, None - while not (kind == tk.NAME and value == keyword and char == 0): - kind, value, (line, char), _, _ = next(token_gen) - start = line, char - while not (kind == tk.DEDENT and value == '' and char == 0): - kind, value, (line, char), _, _ = next(token_gen) - end = line, char - yield source[abs_pos(start, source): abs_pos(end, source)] - - -@cached -def parse_functions(source): - return parse_top_level(source, 'def') - - -@cached -def parse_classes(source): - return parse_top_level(source, 'class') - - -def 
skip_indented_block(token_gen): - kind, value, start, end, raw = next(token_gen) - while kind != tk.INDENT: - kind, value, start, end, raw = next(token_gen) - indent = 1 - for kind, value, start, end, raw in token_gen: - if kind == tk.INDENT: - indent += 1 - elif kind == tk.DEDENT: - indent -= 1 - if indent == 0: - return kind, value, start, end, raw - - -@cached -@yield_list -def parse_methods(source): - source = ''.join(parse_classes(source)) - token_gen = tk.generate_tokens(StringIO(source).readline) - kind, value, char = None, None, None - while True: - start, end = None, None - while not (kind == tk.NAME and value == 'def'): - kind, value, (line, char), _, _ = next(token_gen) - start = line, char - kind, value, (line, char), _, _ = skip_indented_block(token_gen) - end = line, char - yield source[abs_pos(start, source): abs_pos(end, source)] - - -def parse_contexts(source, kind): - if kind == 'module_docstring': - return [source] - if kind == 'function_docstring': - return parse_functions(source) - if kind == 'class_docstring': - return parse_classes(source) - if kind == 'method_docstring': - return parse_methods(source) - if kind == 'def_docstring': - return parse_functions(source) + parse_methods(source) - if kind == 'docstring': - return ([parse_module_docstring(source)] + parse_functions(source) + - parse_classes(source) + parse_methods(source)) - - -# -# Framework -# +humanize = lambda string: re(r'(.)([A-Z]+)').sub(r'\1 \2', string).lower() +is_magic = lambda name: name.startswith('__') and name.endswith('__') +is_ascii = lambda string: all(ord(char) < 128 for char in string) +is_blank = lambda string: not string.strip() +leading_space = lambda string: re('\s*').match(string).group() -class Error(object): +class Value(object): - """Error in docstring style. 
+ __init__ = lambda self, *args: vars(self).update(zip(self._fields, args)) + __hash__ = lambda self: hash(repr(self)) + __eq__ = lambda self, other: other and vars(self) == vars(other) - * Stores relevant data about the error, - * provides format for printing an error, - * provides __lt__ method to sort errors. + def __repr__(self): + args = [vars(self)[field] for field in self._fields] + return '%s(%s)' % (self.__class__.__name__, ', '.join(map(repr, args))) - """ - # options that define how errors are printed - explain = False - range = False - quote = False +class Definition(Value): - def __init__(self, filename, source, docstring, context, - explanation, start=None, end=None): - self.filename = filename - self.source = source - self.docstring = docstring - self.context = context - self.explanation = explanation.strip() + _fields = 'name _source start end docstring children parent'.split() - if start is None: - self.start = source.find(context) + context.find(docstring) - else: - self.start = source.find(context) + start - self.line, self.char = rel_pos(self.start, self.source) + _human = property(lambda self: humanize(type(self).__name__)) + kind = property(lambda self: self._human.split()[-1]) + module = property(lambda self: self.parent.module) + all = property(lambda self: self.module.all) + _slice = property(lambda self: slice(self.start - 1, self.end)) + source = property(lambda self: ''.join(self._source[self._slice])) + __iter__ = lambda self: chain([self], *self.children) - if end is None: - self.end = self.start + len(docstring) - else: - self.end = source.find(context) + end - self.end_line, self.end_char = rel_pos(self.end, self.source) + @property + def _publicity(self): + return {True: 'public', False: 'private'}[self.is_public] def __str__(self): - s = self.filename + ':%d:%d' % (self.line, self.char) - if self.range: - s += '..%d:%d' % (self.end_line, self.end_char) - if self.explain: - s += ': ' + self.explanation + '\n' - else: - s += ': ' + 
self.explanation.split('\n')[0].strip() - if self.quote: - quote = self.source[self.start:self.end].strip() - s += '\n> ' + '\n> '.join(quote.split('\n')) + '\n' - return s - - def __lt__(self, other): - return (self.filename, self.start) < (other.filename, other.start) - - -@yield_list -def find_checks(keyword): - for function in globals().values(): - if inspect.isfunction(function): - args = inspect.getargspec(function)[0] - if args and args[0] == keyword: - yield function - - -@yield_list -def check_source(source, filename): - keywords = ['module_docstring', 'function_docstring', - 'class_docstring', 'method_docstring', - 'def_docstring', 'docstring'] # TODO? 'nested_docstring'] - is_script = source.startswith('#!') or \ - basename(filename).startswith('test_') - for keyword in keywords: - for check in find_checks(keyword): - for context in parse_contexts(source, keyword): - docstring = parse_docstring(context, keyword) - result = check(docstring, context, is_script) - if result: - positions = [] if result is True else result - yield Error(filename, source, docstring, context, - check.__doc__, *positions) - - -def find_input_files(filenames): - """ Return a list of input files. - - `filenames` is a list of filenames, which may be either files - or directories. Files within subdirectories are added - recursively. 
- - """ - input_files = [] + return 'in %s %s `%s`' % (self._publicity, self._human, self.name) - filenames = [abspath(expanduser(f)) for f in filenames] - for filename in filenames: - if isdir(filename): - for root, _dirs, files in walk(filename): - input_files += [path_join(root, f) for f in sorted(files) - if f.endswith(".py")] - elif isfile(filename): - input_files += [filename] - else: - print_error("%s is not a file or directory" % filename) - return input_files +class Module(Definition): + _fields = 'name _source start end docstring children parent _all'.split() + is_public = True + _nest = staticmethod(lambda s: {'def': Function, 'class': Class}[s]) + module = property(lambda self: self) + all = property(lambda self: self._all) + __str__ = lambda self: 'at module level' -def check_files(filenames): - r"""Return list of docstring style errors found in files. - Example - ------- - >>> import pep257 - >>> pep257.check_files(['one.py', 'two.py']) - ['one.py:23:1 PEP257 Use u\"\"\" for Unicode docstrings.'] +class Function(Definition): - """ - errors = [] - for filename in find_input_files(filenames): - errors.extend(check_source(open(filename).read(), filename)) - return [str(e) for e in errors] + _nest = staticmethod(lambda s: {'def': NestedFunction, + 'class': NestedClass}[s]) + @property + def is_public(self): + if self.all is not None: + return self.name in self.all + else: # TODO: are there any magic functions? 
not methods + return not self.name.startswith('_') or is_magic(self.name) -def parse_options(): - parser = OptionParser(version=__version__) - parser.add_option('-e', '--explain', action='store_true', - help='show explanation of each error') - parser.add_option('-r', '--range', action='store_true', - help='show error start..end positions') - parser.add_option('-q', '--quote', action='store_true', - help='quote erroneous lines') - return parser.parse_args() +class NestedFunction(Function): -def print_error(message): - sys.stderr.write(message) - sys.stderr.write('\n') - sys.stderr.flush() + is_public = False -def main(options, arguments): - print('=' * 80) - print('Note: checks are relaxed for scripts (with #!) compared to modules') - Error.explain = options.explain - Error.range = options.range - Error.quote = options.quote - errors = [] +class Method(Function): - for filename in find_input_files(arguments): - try: - f = open(filename) - except IOError: - print_error("Error opening file %s" % filename) - else: - try: - errors.extend(check_source(f.read(), filename)) - except IOError: - print_error("Error reading file %s" % filename) - except tk.TokenError: - print_error("Error parsing file %s" % filename) - finally: - f.close() - for error in sorted(errors): - print_error(str(error)) - return 1 if errors else 0 + @property + def is_public(self): + name_is_public = not self.name.startswith('_') or is_magic(self.name) + return self.parent.is_public and name_is_public -# -# Check functions -# +class Class(Definition): + _nest = staticmethod(lambda s: {'def': Method, 'class': NestedClass}[s]) + is_public = Function.is_public -def check_modules_have_docstrings(module_docstring, context, is_script): - """All modules should have docstrings. - All modules should normally have docstrings. 
+class NestedClass(Class): - """ - if not module_docstring: # or not eval(module_docstring).strip(): - return 0, min(79, len(context)) - if not eval(module_docstring).strip(): - return True + is_public = False -def check_def_has_docstring(def_docstring, context, is_script): - """Exported definitions should have docstrings. +class Token(Value): - ...all functions and classes exported by a module should also have - docstrings. Public methods (including the __init__ constructor) - should also have docstrings. - - """ - if is_script: - return # assume nothing is exported - def_name = context.split()[1] - if def_name.startswith('_') and not def_name.endswith('__'): - return # private, not exported - if not def_docstring: - return 0, len(context.split('\n')[0]) - if not eval(def_docstring).strip(): - return True + _fields = 'kind value start end source'.split() -def check_class_has_docstring(class_docstring, context, is_script): - """Exported classes should have docstrings. +class TokenStream(object): - ...all functions and classes exported by a module should also have - docstrings. 
+ def __init__(self, filelike): + self._generator = tk.generate_tokens(filelike.readline) + self.current = Token(*next(self._generator, None)) + self.line = self.current.start[0] - """ - if is_script: - return # assume nothing is exported - class_name = context.split()[1] - if class_name.startswith('_'): - return # not exported - if not class_docstring: - return 0, len(context.split('\n')[0]) - if not eval(class_docstring).strip(): - return True + def move(self): + previous = self.current + current = next(self._generator, None) + self.current = None if current is None else Token(*current) + self.line = self.current.start[0] if self.current else self.line + return previous + def __iter__(self): + while True: + if self.current is not None: + yield self.current + else: + return + self.move() -def check_triple_double_quotes(docstring, context, is_script): - r"""Use \"\"\"triple double quotes\"\"\". - For consistency, always use \"\"\"triple double quotes\"\"\" around - docstrings. Use r\"\"\"raw triple double quotes\"\"\" if you use any - backslashes in your docstrings. For Unicode docstrings, use - u\"\"\"Unicode triple-quoted strings\"\"\". +class AllError(Exception): - """ - if docstring and not (docstring.startswith('"""') or - docstring.startswith('r"""') or - docstring.startswith('u"""')): - return True + def __init__(self, message): + Exception.__init__( + self, message + + 'That means pep257 cannot decide which definitions are public. ' + 'Variable __all__ should be present at most once in each file, ' + "in form `__all__ = ('a_public_function', 'APublicClass', ...)`. " + 'More info on __all__: http://stackoverflow.com/q/44834/. ') -def check_backslashes(docstring, context, is_script): - r"""Use r\"\"\" if any backslashes in your docstrings. +class Parser(object): - Use r\"\"\"raw triple double quotes\"\"\" if you use any backslashes - (\\) in your docstrings. 
- - """ - if docstring and "\\" in docstring and not docstring.startswith('r"""'): - return True + def __call__(self, filelike, filename): + self.source = filelike.readlines() + src = ''.join(self.source) + self.stream = TokenStream(StringIO(src)) + self.filename = filename + self.all = None + return self.parse_module() + + current = property(lambda self: self.stream.current) + line = property(lambda self: self.stream.line) + + def consume(self, kind): + assert self.stream.move().kind == kind + + def leapfrog(self, kind): + for token in self.stream: + if token.kind == kind: + self.consume(kind) + return + + def parse_docstring(self): + for token in self.stream: + if token.kind in [tk.COMMENT, tk.NEWLINE, tk.NL]: + continue + elif token.kind == tk.STRING: + return token.value + else: + return None + + def parse_definitions(self, class_, all=False): + for token in self.stream: + if all and token.value == '__all__': + self.parse_all() + if token.value in ['def', 'class']: + yield self.parse_definition(class_._nest(token.value)) + if token.kind == tk.INDENT: + self.consume(tk.INDENT) + for definition in self.parse_definitions(class_): + yield definition + if token.kind == tk.DEDENT: + return + + def parse_all(self): + assert self.current.value == '__all__' + self.consume(tk.NAME) + if self.current.value != '=': + raise AllError('Could not evaluate contents of __all__. ') + self.consume(tk.OP) + if self.current.value not in '([': + raise AllError('Could not evaluate contents of __all__. ') + if self.current.value == '[': + msg = ("%s WARNING: __all__ is defined as a list, this means " + "pep257 cannot reliably detect contents of the __all__ " + "variable, because it can be mutated. Change __all__ to be " + "an (immutable) tuple, to remove this warning. Note, " + "pep257 uses __all__ to detect which definitions are " + "public, to warn if public definitions are missing " + "docstrings. If __all__ is a (mutable) list, pep257 cannot " + "reliably assume its contents. 
pep257 will proceed " + "assuming __all__ is not mutated.\n" % self.filename) + sys.stderr.write(msg) + self.consume(tk.OP) + s = '(' + if self.current.kind != tk.STRING: + raise AllError('Could not evaluate contents of __all__. ') + while self.current.value not in ')]': + s += self.current.value + self.stream.move() + s += ')' + try: + self.all = eval(s, {}) + except BaseException: + raise AllError('Could not evaluate contents of __all__: %s. ' % s) + + def parse_module(self): + start = self.line + docstring = self.parse_docstring() + children = list(self.parse_definitions(Module, all=True)) + assert self.current is None + end = self.line + module = Module(self.filename, self.source, start, end, + docstring, children, None, self.all) + for child in module.children: + child.parent = module + return module + + def parse_definition(self, class_): + start = self.line + self.consume(tk.NAME) + name = self.current.value + self.leapfrog(tk.INDENT) + assert self.current.kind != tk.INDENT + docstring = self.parse_docstring() + children = list(self.parse_definitions(class_)) + assert self.current.kind == tk.DEDENT + end = self.line - 1 + definition = class_(name, self.source, start, end, + docstring, children, None) + for child in definition.children: + child.parent = definition + return definition -def check_unicode_docstring(docstring, context, is_script): - r"""Use u\"\"\" for Unicode docstrings. +class Error(object): - For Unicode docstrings, use u\"\"\"Unicode triple-quoted stringsr\"\"\". 
+ """Error in docstring style.""" - """ - if (docstring and not all(isascii(char) for char in docstring) and - not docstring.startswith('u"""')): - return True + # Options that define how errors are printed: + explain = False + source = False + + def __init__(self, message=None, final=False): + self.message, self.is_final = message, final + self.definition, self.explanation = [None, None] + + code = property(lambda self: self.message.partition(':')[0]) + filename = property(lambda self: self.definition.module.name) + line = property(lambda self: self.definition.start) + + @property + def lines(self): + source = '' + lines = self.definition._source[self.definition._slice] + offset = self.definition.start + lines_stripped = list(reversed(list(dropwhile(is_blank, + reversed(lines))))) + numbers_width = 0 + for n, line in enumerate(lines_stripped): + numbers_width = max(numbers_width, n + offset) + numbers_width = len(str(numbers_width)) + numbers_width = 6 + for n, line in enumerate(lines_stripped): + source += '%*d: %s' % (numbers_width, n + offset, line) + if n > 5: + source += ' ...\n' + break + return source + def __str__(self): + self.explanation = '\n'.join(l for l in self.explanation.split('\n') + if not is_blank(l)) + template = '%(filename)s:%(line)s %(definition)s:\n %(message)s' + if self.source and self.explain: + template += '\n\n%(explanation)s\n\n%(lines)s\n' + elif self.source and not self.explain: + template += '\n\n%(lines)s\n' + elif self.explain and not self.source: + template += '\n\n%(explanation)s\n\n' + return template % dict((name, getattr(self, name)) for name in + ['filename', 'line', 'definition', 'message', + 'explanation', 'lines']) + + __repr__ = __str__ -def check_one_liners(docstring, context, is_script): - """One-liner docstrings should fit on one line with quotes. + def __lt__(self, other): + return (self.filename, self.line) < (other.filename, other.line) - The closing quotes are on the same line as the opening quotes. 
- This looks better for one-liners. - """ - if not docstring: - return - lines = docstring.split('\n') - if len(lines) > 1: - non_empty = [l for l in lines if any([c.isalpha() for c in l])] - if len(non_empty) == 1: - return True +def parse_options(): + parser = OptionParser(version=__version__, + usage='Usage: pep257 [options] [...]') + option = parser.add_option + option('-e', '--explain', action='store_true', + help='show explanation of each error') + option('-s', '--source', action='store_true', + help='show source for each error') + option('--ignore', metavar='', default='', + help='ignore a list comma-separated error codes, ' + 'for example: --ignore=D101,D202') + option('--match', metavar='', default='(?!test_).*\.py', + help="check only files that exactly match regular " + "expression; default is --match='(?!test_).*\.py' which " + "matches files that don't start with 'test_' but end with " + "'.py'") + option('--match-dir', metavar='', default='[^\.].*', + help="search only dirs that exactly match regular " + "expression; default is --match-dir='[^\.].*', which matches " + "all dirs that don't start with a dot") + return parser.parse_args() -def check_no_blank_before(def_docstring, context, is_script): - """No blank line before docstring in definitions. +def collect(names, match=lambda name: True, match_dir=lambda name: True): + """Walk dir trees under `names` and generate filnames that `match`. - There's no blank line either before or after the docstring. + Example + ------- + >>> sorted(collect(['non-dir.txt', './'], + ... match=lambda name: name.endswith('.py'))) + ['non-dir.txt', './pep257.py', './setup.py', './test_pep257.py'] """ - if not def_docstring: - return - before = remove_comments(context.split(def_docstring)[0]) - if before.split(':')[-1].count('\n') > 1: - return True - - -def check_ends_with_period(docstring, context, is_script): - """First line should end with a period. - - The [first line of a] docstring is a phrase ending in a period. 
+ for name in names: # map(expanduser, names): + if os.path.isdir(name): + for root, dirs, filenames in os.walk(name): + for dir in dirs: + if not match_dir(dir): + dirs.remove(dir) # do not visit those dirs + for filename in filenames: + if match(filename): + yield os.path.join(root, filename) + else: + yield name - """ - if not docstring: - return - (summary_line, line_number) = get_summary_line_info(docstring) - if not summary_line.endswith('.'): - return True +def check(filenames, ignore=()): + """Generate PEP 257 errors that exist in `filenames` iterable. -def check_imperative_mood(def_docstring, context, is_script): - """First line should be in imperative mood ('Do', not 'Does'). + Skips errors with error-codes defined in `ignore` iterable. - [Docstring] prescribes the function or method's effect as a command: - ("Do this", "Return that"), not as a description; e.g. don't write - "Returns the pathname ...". + Example + ------- + >>> check(['pep257.py'], ignore=['D100']) + """ - if def_docstring and eval(def_docstring).strip(): - first_word = eval(def_docstring).strip().split()[0] - if first_word.endswith('s') and not first_word.endswith('ss'): - return True - - -def check_no_signature(def_docstring, context, is_script): - """First line should not be function's or method's "signature". + for filename in filenames: + try: + with open(filename) as file: + source = file.read() + for error in PEP257Checker().check_source(source, filename): + code = getattr(error, 'code', None) + if code is not None and code not in ignore: + yield error + except (EnvironmentError, AllError): + yield sys.exc_info()[1] + except tk.TokenError: + yield SyntaxError('invalid syntax in file %s' % filename) - The one-line docstring should NOT be a "signature" reiterating - the function/method parameters (which can be obtained by introspection). 
- """ - if not def_docstring: - return - def_name = context.split(def_docstring)[0].split()[1].split('(')[0] - first_line = eval(def_docstring).split('\n')[0] - if def_name + '(' in first_line.replace(' ', ''): - return True +def main(options, arguments): + Error.explain = options.explain + Error.source = options.source + collected = collect(arguments or ['.'], + match=re(options.match + '$').match, + match_dir=re(options.match_dir + '$').match) + code = 0 + for error in check(collected, ignore=options.ignore.split(',')): + sys.stderr.write('%s\n' % error) + code = 1 + return code -def check_return_type(def_docstring, context, is_script): - """Return value type should be mentioned. +parse = Parser() - However, the nature of the return value cannot be determined by - introspection, so it should be mentioned. - """ - if (not def_docstring) or is_script: - return - if 'return' not in def_docstring.lower(): - tokens = list(tk.generate_tokens(StringIO(context).readline)) - after_return = [tokens[i + 1][0] for i, token in enumerate(tokens) - if token[1] == 'return'] - # not very precise (tk.OP ';' is not taken into account) - if set(after_return) - set([tk.COMMENT, tk.NL, tk.NEWLINE]) != set([]): - return True - - -def check_blank_after_summary(docstring, context, is_script): - """Blank line missing after one-line summary. - - Multi-line docstrings consist of a summary line just like a one-line - docstring, followed by a blank line, followed by a more elaborate - description. The summary line may be used by automatic indexing tools; - it is important that it fits on one line and is separated from the - rest of the docstring by a blank line. 
+def check_for(kind, terminal=False): + def decorator(f): + f._check_for = kind + f._terminal = terminal + return f + return decorator - """ - if not docstring: - return - lines = eval(docstring).split('\n') - if len(lines) > 1: - (summary_line, line_number) = get_summary_line_info(docstring) - if len(lines) <= (line_number+1) or lines[line_number+1].strip() != '': - return True +class PEP257Checker(object): -def check_indent(docstring, context, is_script): - """The entire docstring should be indented same as code. + """Checker for PEP 257. - The entire docstring is indented the same as the quotes at its - first line. + D10x: Missing docstrings + D20x: Whitespace issues + D30x: Docstring formatting + D40x: Docstring content issues """ - if (not docstring) or len(eval(docstring).split('\n')) == 1: - return - non_empty_lines = [line for line in eval(docstring).split('\n')[1:] - if line.strip()] - if not non_empty_lines: - return - indent = min([len(l) - len(l.lstrip()) for l in non_empty_lines]) - if indent != len(context.split(docstring)[0].split('\n')[-1]): - return True - - -def check_blank_before_after_class(class_docstring, context, is_script): - """Class docstring should have 1 blank line around them. - - Insert a blank line before and after all docstrings (one-line or - multi-line) that document a class -- generally speaking, the class's - methods are separated from each other by a single blank line, and the - docstring needs to be offset from the first method by a blank line; - for symmetry, put a blank line between the class header and the - docstring. 
- """ - if not class_docstring: - return - before, after = context.split(class_docstring)[:2] - before_blanks = [not line.strip() for line in before.split('\n')] - after_blanks = [not line.strip() for line in after.split('\n')] - if before_blanks[-3:] != [False, True, True]: - return True - if not all(after_blanks) and after_blanks[:3] != [True, True, False]: - return True - - -def check_blank_after_last_paragraph(docstring, context, is_script): - """Multiline docstring should end with 1 blank line. - - The BDFL recommends inserting a blank line between the last - paragraph in a multi-line docstring and its closing quotes, - placing the closing quotes on a line by themselves. - - """ - if (not docstring) or len(eval(docstring).split('\n')) == 1: - return - blanks = [not line.strip() for line in eval(docstring).split('\n')] - if blanks[-3:] != [False, True, True]: - return True + def check_source(self, source, filename): + module = parse(StringIO(source), filename) + for definition in module: + for check in self.checks: + terminate = False + if isinstance(definition, check._check_for): + error = check(None, definition, definition.docstring) + errors = error if hasattr(error, '__iter__') else [error] + for error in errors: + if error is not None: + partition = check.__doc__.partition('.\n') + message, _, explanation = partition + if error.message is None: + error.message = message + error.explanation = explanation + error.definition = definition + yield error + if check._terminal: + terminate = True + break + if terminate: + break + + @property + def checks(self): + all = [check for check in vars(type(self)).values() + if hasattr(check, '_check_for')] + return sorted(all, key=lambda check: not check._terminal) + + @check_for(Definition, terminal=True) + def check_docstring_missing(self, definition, docstring): + """D10{0,1,2,3}: Public definitions should have docstrings. + + All modules should normally have docstrings. [...] 
all functions and + classes exported by a module should also have docstrings. Public + methods (including the __init__ constructor) should also have + docstrings. + + Note: Public (exported) definitions are either those with names listed + in __all__ variable (if present), or those that do not start + with a single underscore. + + """ + if (not docstring and definition.is_public or + docstring and is_blank(eval(docstring))): + codes = {Module: 'D100', Class: 'D101', NestedClass: 'D101', + Method: 'D102', Function: 'D103', NestedFunction: 'D103'} + return Error('%s: Docstring missing' % codes[type(definition)]) + + @check_for(Definition) + def check_one_liners(self, definition, docstring): + """D200: One-liner docstrings should fit on one line with quotes. + + The closing quotes are on the same line as the opening quotes. + This looks better for one-liners. + + """ + if docstring: + lines = eval(docstring).split('\n') + if len(lines) > 1: + non_empty_lines = sum(1 for l in lines if not is_blank(l)) + if non_empty_lines == 1: + return Error('D200: One-line docstring should not occupy ' + '%s lines' % len(lines)) + + @check_for(Function) + def check_no_blank_before(self, function, docstring): # def + """D20{1,2}: No blank lines allowed around function/method docstring. + + There's no blank line either before or after the docstring. + + """ + # NOTE: This does not take comments into account. + # NOTE: This does not take into account functions with groups of code. 
+ if docstring: + before, _, after = function.source.partition(docstring) + blanks_before = list(map(is_blank, before.split('\n')[:-1])) + blanks_after = list(map(is_blank, after.split('\n')[1:])) + blanks_before_count = sum(takewhile(bool, reversed(blanks_before))) + blanks_after_count = sum(takewhile(bool, blanks_after)) + if blanks_before_count != 0: + yield Error('D201: No blank lines allowed *before* %s ' + 'docstring, found %s' + % (function.kind, blanks_before_count)) + if not all(blanks_after) and blanks_after_count != 0: + yield Error('D202: No blank lines allowed *after* %s ' + 'docstring, found %s' + % (function.kind, blanks_after_count)) + + @check_for(Class) + def check_blank_before_after_class(slef, class_, docstring): + """D20{3,4}: Class docstring should have 1 blank line around them. + + Insert a blank line before and after all docstrings (one-line or + multi-line) that document a class -- generally speaking, the class's + methods are separated from each other by a single blank line, and the + docstring needs to be offset from the first method by a blank line; + for symmetry, put a blank line between the class header and the + docstring. 
+ + """ + # NOTE: this gives flase-positive in this case + # class Foo: + # + # """Docstring.""" + # + # + # # comment here + # def foo(): pass + if docstring: + before, _, after = class_.source.partition(docstring) + blanks_before = list(map(is_blank, before.split('\n')[:-1])) + blanks_after = list(map(is_blank, after.split('\n')[1:])) + blanks_before_count = sum(takewhile(bool, reversed(blanks_before))) + blanks_after_count = sum(takewhile(bool, blanks_after)) + if blanks_before_count != 1: + yield Error('D203: Expected 1 blank line *before* class ' + 'docstring, found %s' % blanks_before_count) + if not all(blanks_after) and blanks_after_count != 1: + yield Error('D204: Expected 1 blank line *after* class ' + 'docstring, found %s' % blanks_after_count) + + @check_for(Definition) + def check_blank_after_summary(self, definition, docstring): + """D205: Blank line missing between one-line summary and description. + + Multi-line docstrings consist of a summary line just like a one-line + docstring, followed by a blank line, followed by a more elaborate + description. The summary line may be used by automatic indexing tools; + it is important that it fits on one line and is separated from the + rest of the docstring by a blank line. + + """ + if docstring: + lines = eval(docstring).strip().split('\n') + if len(lines) > 1 and not is_blank(lines[1]): + return Error() + + @check_for(Definition) + def check_indent(self, definition, docstring): + """D20{6,7,8}: The entire docstring should be indented same as code. + + The entire docstring is indented the same as the quotes at its + first line. + + """ + if docstring: + before_docstring, _, _ = definition.source.partition(docstring) + _, _, indent = before_docstring.rpartition('\n') + lines = docstring.split('\n') + if len(lines) > 1: + lines = lines[1:] # First line does not need indent. 
+ indents = [leading_space(l) for l in lines if not is_blank(l)] + if set(' \t') == set(''.join(indents) + indent): + return Error('D206: Docstring indented with both tabs and ' + 'spaces') + if (len(indents) > 1 and min(indents[:-1]) > indent + or indents[-1] > indent): + return Error('D208: Docstring is over-indented') + if min(indents) < indent: + return Error('D207: Docstring is under-indented') + + @check_for(Definition) + def check_newline_after_last_paragraph(self, definition, docstring): + """D209: Put multi-line docstring closing quotes on separate line. + + Unless the entire docstring fits on a line, place the closing + quotes on a line by themselves. + + """ + if docstring: + lines = [l for l in eval(docstring).split('\n') if not is_blank(l)] + if len(lines) > 1: + if docstring.split("\n")[-1].strip() not in ['"""', "'''"]: + return Error('D209: Put multi-line docstring closing ' + 'quotes on separate line') + + @check_for(Definition) + def check_triple_double_quotes(self, definition, docstring): + r'''D300: Use """triple double quotes""". + + For consistency, always use """triple double quotes""" around + docstrings. Use r"""raw triple double quotes""" if you use any + backslashes in your docstrings. For Unicode docstrings, use + u"""Unicode triple-quoted strings""". + + Note: Exception to this is made if the docstring contains + """ quotes in its body. + + ''' + if docstring and '"""' in eval(docstring) and docstring.startswith( + ("'''", "r'''", "u'''")): + # Allow ''' quotes if docstring contains """, because otherwise """ + # quotes could not be expressed inside docstring. Not in PEP 257. + return + if docstring and not docstring.startswith(('"""', 'r"""', 'u"""')): + quotes = "'''" if "'''" in docstring[:4] else "'" + return Error('D300: Expected """-quotes, got %s-quotes' % quotes) + + @check_for(Definition) + def check_backslashes(self, definition, docstring): + r'''D301: Use r""" if any backslashes in a docstring. 
+ + Use r"""raw triple double quotes""" if you use any backslashes + (\) in your docstrings. + + ''' + # Just check that docstring is raw, check_triple_double_quotes + # ensures the correct quotes. + if docstring and '\\' in docstring and not docstring.startswith('r'): + return Error() + + @check_for(Definition) + def check_unicode_docstring(self, definition, docstring): + r'''D302: Use u""" for docstrings with Unicode. + + For Unicode docstrings, use u"""Unicode triple-quoted strings""". + + ''' + # Just check that docstring is unicode, check_triple_double_quotes + # ensures the correct quotes. + if docstring and sys.version_info[0] <= 2: + if not is_ascii(docstring) and not docstring.startswith('u'): + return Error() + + @check_for(Definition) + def check_ends_with_period(self, definition, docstring): + """D400: First line should end with a period. + + The [first line of a] docstring is a phrase ending in a period. + + """ + if docstring: + summary_line = eval(docstring).strip().split('\n')[0] + if not summary_line.endswith('.'): + return Error("D400: First line should end with '.', not %r" + % summary_line[-1]) + + @check_for(Function) + def check_imperative_mood(self, function, docstring): # def context + """D401: First line should be in imperative mood: 'Do', not 'Does'. + + [Docstring] prescribes the function or method's effect as a command: + ("Do this", "Return that"), not as a description; e.g. don't write + "Returns the pathname ...". + + """ + if docstring: + stripped = eval(docstring).strip() + if stripped: + first_word = stripped.split()[0] + if first_word.endswith('s') and not first_word.endswith('ss'): + return Error('D401: First line should be imperative: ' + '%r, not %r' % (first_word[:-1], first_word)) + + @check_for(Function) + def check_no_signature(self, function, docstring): # def context + """D402: First line should not be function's or method's "signature". 
+ + The one-line docstring should NOT be a "signature" reiterating the + function/method parameters (which can be obtained by introspection). + + """ + if docstring: + first_line = eval(docstring).strip().split('\n')[0] + if function.name + '(' in first_line.replace(' ', ''): + return Error("D402: First line should not be %s's signature" + % function.kind) + + # Somewhat hard to determine if return value is mentioned. + # @check(Function) + def SKIP_check_return_type(self, function, docstring): + """D40x: Return value type should be mentioned. + + [T]he nature of the return value cannot be determined by + introspection, so it should be mentioned. + + """ + if docstring and function.returns_value: + if 'return' not in docstring.lower(): + return Error() if __name__ == '__main__': diff --git a/pymode/libs/pylama/lint/pylama_pep8/__init__.py b/pymode/libs/pylama/lint/pylama_pep8/__init__.py index 6948302e..830b080e 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pep8/__init__.py @@ -44,7 +44,7 @@ def error(self, line_number, offset, text, check): self.errors.append(dict( text=text, - type=code, + type=code.replace('E', 'C'), col=offset + 1, lnum=line_number, )) diff --git a/pymode/libs/pylama/lint/pylama_pep8/pep8.py b/pymode/libs/pylama/lint/pylama_pep8/pep8.py index e0035b3d..215bde2d 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/pep8.py +++ b/pymode/libs/pylama/lint/pylama_pep8/pep8.py @@ -424,9 +424,12 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, parens = [0] * nrows # relative indents of physical lines rel_indent = [0] * nrows + # for each depth, collect a list of opening rows + open_rows = [[0]] # visual indents indent_chances = {} last_indent = tokens[0][2] + # for each depth, memorize the visual indent column indent = [last_indent[1]] if verbose >= 3: print(">>> " + tokens[0][4].rstrip()) @@ -448,17 +451,16 @@ def continued_indentation(logical_line, tokens, indent_level, 
hang_closing, # record the initial indent. rel_indent[row] = expand_indent(line) - indent_level - if depth: - # a bracket expression in a continuation line. - # find the line that it was opened on - for open_row in range(row - 1, -1, -1): - if parens[open_row]: - break - else: - # an unbracketed continuation line (ie, backslash) - open_row = 0 - hang = rel_indent[row] - rel_indent[open_row] + # identify closing bracket close_bracket = (token_type == tokenize.OP and text in ']})') + + # is the indent relative to an opening bracket line? + valid_hang = 4 if (hang_closing or not close_bracket) else 0 + for open_row in reversed(open_rows[depth]): + if rel_indent[row] == rel_indent[open_row] + valid_hang: + break + hang = rel_indent[row] - rel_indent[open_row] + # is there any chance of visual indent? visual_indent = (not close_bracket and hang > 0 and indent_chances.get(start[1])) @@ -471,6 +473,16 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, # closing bracket matches indentation of opening bracket's line if hang_closing: yield start, "E133 closing bracket is missing indentation" + elif indent[depth] and start[1] < indent[depth]: + if visual_indent is not True: + # visual indent is broken + yield (start, "E128 continuation line " + "under-indented for visual indent") + elif hang == 4 or (indent_next and rel_indent[row] == 8): + # hanging indent is verified + if close_bracket and not hang_closing: + yield (start, "E123 closing bracket does not match " + "indentation of opening bracket's line") elif visual_indent is True: # visual indent is verified if not indent[depth]: @@ -478,15 +490,6 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, elif visual_indent in (text, str): # ignore token lined up with matching one from a previous line pass - elif indent[depth] and start[1] < indent[depth]: - # visual indent is broken - yield (start, "E128 continuation line " - "under-indented for visual indent") - elif hang == 
4 or (indent_next and rel_indent[row] == 8): - # hanging indent is verified - if close_bracket and not hang_closing: - yield (start, "E123 closing bracket does not match " - "indentation of opening bracket's line") else: # indent is broken if hang <= 0: @@ -519,6 +522,9 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, if text in '([{': depth += 1 indent.append(0) + if len(open_rows) == depth: + open_rows.append([]) + open_rows[depth].append(row) parens[row] += 1 if verbose >= 4: print("bracket depth %s seen, col %s, visual min = %s" % @@ -532,13 +538,13 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, for ind in list(indent_chances): if ind >= prev_indent: del indent_chances[ind] + del open_rows[depth + 1:] depth -= 1 if depth: indent_chances[indent[depth]] = True for idx in range(row, -1, -1): if parens[idx]: parens[idx] -= 1 - rel_indent[row] = rel_indent[idx] break assert len(indent) == depth + 1 if start[1] not in indent_chances: @@ -1122,6 +1128,21 @@ def parse_udiff(diff, patterns=None, parent='.'): if rows and filename_match(path, patterns)]) +def normalize_paths(value, parent=os.curdir): + """Parse a comma-separated list of paths. + + Return a list of absolute paths. + """ + if not value or isinstance(value, list): + return value + paths = [] + for path in value.split(','): + if path.startswith('./'): + path = os.path.abspath(os.path.join(parent, path)) + paths.append(path.rstrip('/')) + return paths + + def filename_match(filename, patterns, default=True): """ Check if patterns contains a pattern that matches filename. 
@@ -1200,7 +1221,7 @@ def __init__(self, filename=None, lines=None, try: self.lines = readlines(filename) except IOError: - exc_type, exc = sys.exc_info()[:2] + (exc_type, exc) = sys.exc_info()[:2] self._io_error = '%s: %s' % (exc_type.__name__, exc) self.lines = [] else: @@ -1216,7 +1237,7 @@ def __init__(self, filename=None, lines=None, self.report_error = self.report.error def report_invalid_syntax(self): - exc_type, exc = sys.exc_info()[:2] + (exc_type, exc) = sys.exc_info()[:2] if len(exc.args) > 1: offset = exc.args[1] if len(offset) > 2: @@ -1266,7 +1287,7 @@ def check_physical(self, line): for name, check, argument_names in self._physical_checks: result = self.run_check(check, argument_names) if result is not None: - offset, text = result + (offset, text) = result self.report_error(self.line_number, offset, text, check) def build_tokens_line(self): @@ -1279,7 +1300,7 @@ def build_tokens_line(self): length = 0 previous = None for token in self.tokens: - token_type, text = token[0:2] + (token_type, text) = token[0:2] if token_type == tokenize.COMMENT: comments.append(text) continue @@ -1288,8 +1309,8 @@ def build_tokens_line(self): if token_type == tokenize.STRING: text = mute_string(text) if previous: - end_row, end = previous[3] - start_row, start = token[2] + (end_row, end) = previous[3] + (start_row, start) = token[2] if end_row != start_row: # different row prev_text = self.lines[end_row - 1][end - 1] if prev_text == ',' or (prev_text not in '{[(' @@ -1324,10 +1345,10 @@ def check_logical(self): for name, check, argument_names in self._logical_checks: if self.verbose >= 4: print(' ' + name) - for result in self.run_check(check, argument_names): - offset, text = result + for result in self.run_check(check, argument_names) or (): + (offset, text) = result if isinstance(offset, tuple): - orig_number, orig_offset = offset + (orig_number, orig_offset) = offset else: for token_offset, token in self.mapping: if offset >= token_offset: @@ -1341,10 +1362,10 @@ 
def check_ast(self): tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST) except (SyntaxError, TypeError): return self.report_invalid_syntax() - for name, cls, _ in self._ast_checks: + for name, cls, __ in self._ast_checks: checker = cls(tree, self.filename) for lineno, offset, text, check in checker.run(): - if not noqa(self.lines[lineno - 1]): + if not self.lines or not noqa(self.lines[lineno - 1]): self.report_error(lineno, offset, text, check) def generate_tokens(self): @@ -1572,11 +1593,12 @@ def __init__(self, *args, **kwargs): parse_argv = kwargs.pop('parse_argv', False) config_file = kwargs.pop('config_file', None) parser = kwargs.pop('parser', None) + # build options from dict + options_dict = dict(*args, **kwargs) + arglist = None if parse_argv else options_dict.get('paths', None) options, self.paths = process_options( - parse_argv=parse_argv, config_file=config_file, parser=parser) - if args or kwargs: - # build options from dict - options_dict = dict(*args, **kwargs) + arglist, parse_argv, config_file, parser) + if options_dict: options.__dict__.update(options_dict) if 'paths' in options_dict: self.paths = options_dict['paths'] @@ -1587,8 +1609,6 @@ def __init__(self, *args, **kwargs): if not options.reporter: options.reporter = BaseReport if options.quiet else StandardReport - for index, value in enumerate(options.exclude): - options.exclude[index] = value.rstrip('/') options.select = tuple(options.select or ()) if not (options.select or options.ignore or options.testsuite or options.doctest) and DEFAULT_IGNORE: @@ -1668,6 +1688,7 @@ def excluded(self, filename, parent=None): return True if parent: filename = os.path.join(parent, filename) + filename = os.path.abspath(filename) return filename_match(filename, self.options.exclude) def ignore_code(self, code): @@ -1767,13 +1788,15 @@ def read_config(options, args, arglist, parser): print('user configuration: %s' % user_conf) config.read(user_conf) + local_dir = os.curdir parent = tail = args 
and os.path.abspath(os.path.commonprefix(args)) while tail: if config.read([os.path.join(parent, fn) for fn in PROJECT_CONFIG]): + local_dir = parent if options.verbose: print('local configuration: in %s' % parent) break - parent, tail = os.path.split(parent) + (parent, tail) = os.path.split(parent) pep8_section = parser.prog if config.has_section(pep8_section): @@ -1781,7 +1804,7 @@ def read_config(options, args, arglist, parser): for o in parser.option_list]) # First, read the default values - new_options, _ = parser.parse_args([]) + (new_options, __) = parser.parse_args([]) # Second, parse the configuration for opt in config.options(pep8_section): @@ -1797,13 +1820,15 @@ def read_config(options, args, arglist, parser): value = config.getint(pep8_section, opt) elif opt_type == 'string': value = config.get(pep8_section, opt) + if normalized_opt == 'exclude': + value = normalize_paths(value, local_dir) else: assert opt_type in ('store_true', 'store_false') value = config.getboolean(pep8_section, opt) setattr(new_options, normalized_opt, value) # Third, overwrite with the command-line options - options, _ = parser.parse_args(arglist, values=new_options) + (options, __) = parser.parse_args(arglist, values=new_options) options.doctest = options.testsuite = False return options @@ -1811,9 +1836,6 @@ def read_config(options, args, arglist, parser): def process_options(arglist=None, parse_argv=False, config_file=None, parser=None): """Process options passed either via arglist or via command line args.""" - if not arglist and not parse_argv: - # Don't read the command line if the module is used as a library. 
- arglist = [] if not parser: parser = get_parser() if not parser.has_option('--config'): @@ -1826,7 +1848,12 @@ def process_options(arglist=None, parse_argv=False, config_file=None, (parser.prog, ', '.join(parser.config_options)))) group.add_option('--config', metavar='path', default=config_file, help="user config file location (default: %default)") - options, args = parser.parse_args(arglist) + # Don't read the command line if the module is used as a library. + if not arglist and not parse_argv: + arglist = [] + # If parse_argv is True and arglist is None, arguments are + # parsed from the command line (sys.argv) + (options, args) = parser.parse_args(arglist) options.reporter = None if options.ensure_value('testsuite', False): @@ -1842,7 +1869,7 @@ def process_options(arglist=None, parse_argv=False, config_file=None, options.reporter = parse_argv and options.quiet == 1 and FileReport options.filename = options.filename and options.filename.split(',') - options.exclude = options.exclude.split(',') + options.exclude = normalize_paths(options.exclude) options.select = options.select and options.select.split(',') options.ignore = options.ignore and options.ignore.split(',') diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py index 4e0fd97a..f6445453 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py @@ -1,12 +1,25 @@ """ Check Pyflakes. """ +import sys +from os import path as op from .. import Linter as BaseLinter +# Use local version of pyflakes +path = op.dirname(op.abspath(__file__)) +sys.path.insert(0, path) + + class Linter(BaseLinter): """ Pyflakes code check. """ + def __init__(self): + from pyflakes import messages + + if messages.UndefinedName.message != "E0602 undefined name %r": + monkey_patch_messages(messages) + @staticmethod def run(path, code=None, builtins="", **meta): """ Pyflake code checking. 
@@ -16,7 +29,7 @@ def run(path, code=None, builtins="", **meta): """ import _ast import os - from .pyflakes import checker + from pyflakes import checker os.environ.setdefault('PYFLAKES_BUILTINS', builtins) @@ -30,3 +43,22 @@ def run(path, code=None, builtins="", **meta): text=w.message % w.message_args, )) return errors + + +def monkey_patch_messages(messages): + """ Patch pyflakes messages. """ + + messages.LateFutureImport.message = "W0410 future import(s) %r after other statements" + messages.ImportStarUsed.message = "W0401 'from %s import *' used; unable to detect undefined names" + messages.RedefinedWhileUnused.message = "W0404 redefinition of unused %r from line %r" + messages.DoctestSyntaxError.message = "W0511 syntax error in doctest" + messages.UnusedImport.message = "W0611 %r imported but unused" + messages.UnusedVariable.message = "W0612 local variable %r is assigned to but never used" + messages.RedefinedInListComp.message = "W0621 list comprehension redefines %r from line %r" + messages.Redefined.message = "W0621 redefinition of %r from line %r" + messages.ImportShadowedByLoopVar.message = "W0621 import %r from line %r shadowed by loop variable" + messages.ReturnWithArgsInsideGenerator.message = "E0106 'return' with argument inside generator" + messages.UndefinedName.message = "E0602 undefined name %r" + messages.UndefinedLocal.message = "E0602 local variable %r (defined in enclosing scope on line %r) referenced before assignment" + messages.UndefinedExport.message = "E0603 undefined name %r in __all__" + messages.DuplicateArgument.message = "E1122 duplicate argument %r in function definition" diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py index ca95838a..53bc0721 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py @@ -1,2 +1,2 @@ -__version__ = '0.7.3a0' +__version__ = '0.8' 
diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py index 272caa6d..e756ff42 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py @@ -7,12 +7,11 @@ import doctest import os import sys -try: - builtin_vars = dir(__import__('builtins')) - PY2 = False -except ImportError: - builtin_vars = dir(__import__('__builtin__')) - PY2 = True + +PY2 = sys.version_info < (3, 0) +PY32 = sys.version_info < (3, 3) # Python 2.5 to 3.2 +PY33 = sys.version_info < (3, 4) # Python 2.5 to 3.3 +builtin_vars = dir(__import__('__builtin__' if PY2 else 'builtins')) try: import ast @@ -45,7 +44,7 @@ def iter_child_nodes(node): ast_TryExcept = ast.TryExcept ast_TryFinally = ast.TryFinally -from . import messages +from pyflakes import messages if PY2: @@ -142,16 +141,16 @@ class ExportBinding(Binding): Names which are imported and not otherwise used but appear in the value of C{__all__} will not have an unused import warning reported for them. """ - def names(self): - """ - Return a list of the names referenced by this binding. 
- """ - names = [] - if isinstance(self.source, ast.List): - for node in self.source.elts: + def __init__(self, name, source, scope): + if '__all__' in scope and isinstance(source, ast.AugAssign): + self.names = list(scope['__all__'].names) + else: + self.names = [] + if isinstance(source.value, (ast.List, ast.Tuple)): + for node in source.value.elts: if isinstance(node, ast.Str): - names.append(node.s) - return names + self.names.append(node.s) + super(ExportBinding, self).__init__(name, source) class Scope(dict): @@ -180,6 +179,8 @@ def __init__(self): super(FunctionScope, self).__init__() # Simplify: manage the special locals as globals self.globals = self.alwaysUsed.copy() + self.returnValue = None # First non-empty return + self.isGenerator = False # Detect a generator def unusedAssignments(self): """ @@ -229,7 +230,6 @@ class Checker(object): nodeDepth = 0 offset = None traceTree = False - withDoctest = ('PYFLAKES_NODOCTEST' not in os.environ) builtIns = set(builtin_vars).union(_MAGIC_GLOBALS) _customBuiltIns = os.environ.get('PYFLAKES_BUILTINS') @@ -237,7 +237,8 @@ class Checker(object): builtIns.update(_customBuiltIns.split(',')) del _customBuiltIns - def __init__(self, tree, filename='(none)', builtins=None): + def __init__(self, tree, filename='(none)', builtins=None, + withDoctest='PYFLAKES_DOCTEST' in os.environ): self._nodeHandlers = {} self._deferredFunctions = [] self._deferredAssignments = [] @@ -246,6 +247,7 @@ def __init__(self, tree, filename='(none)', builtins=None): self.filename = filename if builtins: self.builtIns = self.builtIns.union(builtins) + self.withDoctest = withDoctest self.scopeStack = [ModuleScope()] self.exceptHandlers = [()] self.futuresAllowed = True @@ -305,33 +307,28 @@ def checkDeadScopes(self): for scope in self.deadScopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: - all = scope['__all__'].names() + all_names = set(scope['__all__'].names) if not scope.importStarred and \ 
os.path.basename(self.filename) != '__init__.py': # Look for possible mistakes in the export list - undefined = set(all) - set(scope) + undefined = all_names.difference(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: - all = [] + all_names = [] # Look for imported names that aren't used. for importation in scope.values(): - if isinstance(importation, Importation): - if not importation.used and importation.name not in all: - self.report(messages.UnusedImport, - importation.source, importation.name) + if (isinstance(importation, Importation) and + not importation.used and + importation.name not in all_names): + self.report(messages.UnusedImport, + importation.source, importation.name) def pushScope(self, scopeClass=FunctionScope): self.scopeStack.append(scopeClass()) - def pushFunctionScope(self): # XXX Deprecated - self.pushScope(FunctionScope) - - def pushClassScope(self): # XXX Deprecated - self.pushScope(ClassScope) - def report(self, messageClass, *args, **kwargs): self.messages.append(messageClass(self.filename, *args, **kwargs)) @@ -351,9 +348,9 @@ def getCommonAncestor(self, lnode, rnode, stop=None): if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return - if (lnode.level > rnode.level): + if (lnode.depth > rnode.depth): return self.getCommonAncestor(lnode.parent, rnode, stop) - if (rnode.level > lnode.level): + if (rnode.depth > lnode.depth): return self.getCommonAncestor(lnode, rnode.parent, stop) return self.getCommonAncestor(lnode.parent, rnode.parent, stop) @@ -496,7 +493,7 @@ def handleNodeStore(self, node): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): - binding = ExportBinding(name, parent.value) + binding = ExportBinding(name, parent, self.scope) else: binding = Assignment(name, node) if name in self.scope: @@ -548,7 +545,7 @@ def handleNode(self, node, parent): self.isDocstring(node)): self.futuresAllowed 
= False self.nodeDepth += 1 - node.level = self.nodeDepth + node.depth = self.nodeDepth node.parent = parent try: handler = self.getNodeHandler(node.__class__) @@ -578,12 +575,17 @@ def handleDoctests(self, node): except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, - example.indent + 4 + e.offset) + example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) + underscore_in_builtins = '_' in self.builtIns + if not underscore_in_builtins: + self.builtIns.add('_') self.handleChildren(tree) + if not underscore_in_builtins: + self.builtIns.remove('_') self.offset = node_offset self.popScope() @@ -591,15 +593,15 @@ def ignore(self, node): pass # "stmt" type nodes - RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = \ + DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = \ TRYFINALLY = ASSERT = EXEC = EXPR = handleChildren CONTINUE = BREAK = PASS = ignore # "expr" type nodes - BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = \ + BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = \ COMPARE = CALL = REPR = ATTRIBUTE = SUBSCRIPT = LIST = TUPLE = \ - STARRED = handleChildren + STARRED = NAMECONSTANT = handleChildren NUM = STR = BYTES = ELLIPSIS = ignore @@ -695,6 +697,17 @@ def NAME(self, node): # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) + def RETURN(self, node): + if node.value and not self.scope.returnValue: + self.scope.returnValue = node.value + self.handleNode(node.value, node) + + def YIELD(self, node): + self.scope.isGenerator = True + self.handleNode(node.value, node) + + YIELDFROM = YIELD + def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) @@ -705,6 +718,7 @@ def FUNCTIONDEF(self, node): def LAMBDA(self, node): args = [] + 
annotations = [] if PY2: def addArgs(arglist): @@ -712,34 +726,41 @@ def addArgs(arglist): if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: - if arg.id in args: - self.report(messages.DuplicateArgument, - node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: - if arg.arg in args: - self.report(messages.DuplicateArgument, - node, arg.arg) args.append(arg.arg) - self.handleNode(arg.annotation, node) - if hasattr(node, 'returns'): # Only for FunctionDefs - for annotation in (node.args.varargannotation, - node.args.kwargannotation, node.returns): - self.handleNode(annotation, node) + annotations.append(arg.annotation) defaults = node.args.defaults + node.args.kw_defaults - # vararg/kwarg identifiers are not Name nodes - for wildcard in (node.args.vararg, node.args.kwarg): + # Only for Python3 FunctionDefs + is_py3_func = hasattr(node, 'returns') + + for arg_name in ('vararg', 'kwarg'): + wildcard = getattr(node.args, arg_name) if not wildcard: continue - if wildcard in args: - self.report(messages.DuplicateArgument, node, wildcard) - args.append(wildcard) - for default in defaults: - self.handleNode(default, node) + args.append(wildcard if PY33 else wildcard.arg) + if is_py3_func: + if PY33: # Python 2.5 to 3.3 + argannotation = arg_name + 'annotation' + annotations.append(getattr(node.args, argannotation)) + else: # Python >= 3.4 + annotations.append(wildcard.annotation) + + if is_py3_func: + annotations.append(node.returns) + + if len(set(args)) < len(args): + for (idx, arg) in enumerate(args): + if arg in args[:idx]: + self.report(messages.DuplicateArgument, node, arg) + + for child in annotations + defaults: + if child: + self.handleNode(child, node) def runFunction(): @@ -761,6 +782,17 @@ def checkUnusedAssignments(): for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) 
self.deferAssignment(checkUnusedAssignments) + + if PY32: + def checkReturnWithArgumentInsideGenerator(): + """ + Check to see if there is any return statement with + arguments but the function is a generator. + """ + if self.scope.isGenerator and self.scope.returnValue: + self.report(messages.ReturnWithArgsInsideGenerator, + self.scope.returnValue) + self.deferAssignment(checkReturnWithArgumentInsideGenerator) self.popScope() self.deferFunction(runFunction) diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py index a4c31985..1f799ec5 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py @@ -18,7 +18,7 @@ def __str__(self): class UnusedImport(Message): - message = 'W0611 %r imported but unused' + message = '%r imported but unused' def __init__(self, filename, loc, name): Message.__init__(self, filename, loc) @@ -26,7 +26,7 @@ def __init__(self, filename, loc, name): class RedefinedWhileUnused(Message): - message = 'W0404 redefinition of unused %r from line %r' + message = 'redefinition of unused %r from line %r' def __init__(self, filename, loc, name, orig_loc): Message.__init__(self, filename, loc) @@ -34,7 +34,7 @@ def __init__(self, filename, loc, name, orig_loc): class RedefinedInListComp(Message): - message = 'W0621 list comprehension redefines %r from line %r' + message = 'list comprehension redefines %r from line %r' def __init__(self, filename, loc, name, orig_loc): Message.__init__(self, filename, loc) @@ -42,7 +42,7 @@ def __init__(self, filename, loc, name, orig_loc): class ImportShadowedByLoopVar(Message): - message = 'W0621 import %r from line %r shadowed by loop variable' + message = 'import %r from line %r shadowed by loop variable' def __init__(self, filename, loc, name, orig_loc): Message.__init__(self, filename, loc) @@ -50,7 +50,7 @@ def __init__(self, filename, loc, name, 
orig_loc): class ImportStarUsed(Message): - message = "W0401 'from %s import *' used; unable to detect undefined names" + message = "'from %s import *' used; unable to detect undefined names" def __init__(self, filename, loc, modname): Message.__init__(self, filename, loc) @@ -58,7 +58,7 @@ def __init__(self, filename, loc, modname): class UndefinedName(Message): - message = 'E0602 undefined name %r' + message = 'undefined name %r' def __init__(self, filename, loc, name): Message.__init__(self, filename, loc) @@ -66,7 +66,7 @@ def __init__(self, filename, loc, name): class DoctestSyntaxError(Message): - message = 'W0511 syntax error in doctest' + message = 'syntax error in doctest' def __init__(self, filename, loc, position=None): Message.__init__(self, filename, loc) @@ -76,7 +76,7 @@ def __init__(self, filename, loc, position=None): class UndefinedExport(Message): - message = 'E0603 undefined name %r in __all__' + message = 'undefined name %r in __all__' def __init__(self, filename, loc, name): Message.__init__(self, filename, loc) @@ -84,7 +84,7 @@ def __init__(self, filename, loc, name): class UndefinedLocal(Message): - message = ('E0602 local variable %r (defined in enclosing scope on line %r) ' + message = ('local variable %r (defined in enclosing scope on line %r) ' 'referenced before assignment') def __init__(self, filename, loc, name, orig_loc): @@ -93,7 +93,7 @@ def __init__(self, filename, loc, name, orig_loc): class DuplicateArgument(Message): - message = 'E1122 duplicate argument %r in function definition' + message = 'duplicate argument %r in function definition' def __init__(self, filename, loc, name): Message.__init__(self, filename, loc) @@ -101,7 +101,7 @@ def __init__(self, filename, loc, name): class Redefined(Message): - message = 'W0621 redefinition of %r from line %r' + message = 'redefinition of %r from line %r' def __init__(self, filename, loc, name, orig_loc): Message.__init__(self, filename, loc) @@ -109,7 +109,7 @@ def __init__(self, 
filename, loc, name, orig_loc): class LateFutureImport(Message): - message = 'W0410 future import(s) %r after other statements' + message = 'future import(s) %r after other statements' def __init__(self, filename, loc, names): Message.__init__(self, filename, loc) @@ -121,8 +121,15 @@ class UnusedVariable(Message): Indicates that a variable has been explicity assigned to but not actually used. """ - message = 'W0612 local variable %r is assigned to but never used' + message = 'local variable %r is assigned to but never used' def __init__(self, filename, loc, names): Message.__init__(self, filename, loc) self.message_args = (names,) + + +class ReturnWithArgsInsideGenerator(Message): + """ + Indicates a return statement with arguments inside a generator. + """ + message = '\'return\' with argument inside generator' diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py index a65598db..92150ae4 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -4,7 +4,7 @@ # ================== -__version__ = '0.1.5' +__version__ = '0.1.6' __project__ = 'pylama_pylint' __author__ = "horneds " __license__ = "BSD" diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py index af602765..19c80902 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py @@ -106,13 +106,13 @@ def transform(node, infer_function=infer_function): return transform # load brain plugins -# from os import listdir -# from os.path import join, dirname -# BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') -# if BRAIN_MODULES_DIR not in sys.path: - # # add it to the end of the list so user path take precedence - # sys.path.append(BRAIN_MODULES_DIR) -# # load modules in this directory -# for module in listdir(BRAIN_MODULES_DIR): - # if 
module.endswith('.py'): - # __import__(module[:-3]) +from os import listdir +from os.path import join, dirname +BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') +if BRAIN_MODULES_DIR not in sys.path: + # add it to the end of the list so user path take precedence + sys.path.append(BRAIN_MODULES_DIR) +# load modules in this directory +for module in listdir(BRAIN_MODULES_DIR): + if module.endswith('.py'): + __import__(module[:-3]) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py index fcff19eb..72c5b6c3 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py @@ -144,7 +144,17 @@ def visit_class(self, node): """return an astroid.Class node as string""" decorate = node.decorators and node.decorators.accept(self) or '' bases = ', '.join([n.accept(self) for n in node.bases]) - bases = bases and '(%s)' % bases or '' + if sys.version_info[0] == 2: + bases = bases and '(%s)' % bases or '' + else: + metaclass = node.metaclass() + if metaclass: + if bases: + bases = '(%s, metaclass=%s)' % (bases, metaclass.name) + else: + bases = '(metaclass=%s)' % metaclass.name + else: + bases = bases and '(%s)' % bases or '' docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs, self._stmt_list( node.body)) @@ -389,6 +399,8 @@ def visit_tryfinally(self, node): def visit_tuple(self, node): """return an astroid.Tuple node as string""" + if len(node.elts) == 1: + return '(%s, )' % node.elts[0].accept(self) return '(%s)' % ', '.join([child.accept(self) for child in node.elts]) def visit_unaryop(self, node): diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py new file mode 100644 index 00000000..5001b7cb --- /dev/null +++ 
b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py @@ -0,0 +1,147 @@ +"""Astroid hooks for the Python 2 GObject introspection bindings. + +Helps with understanding everything imported from 'gi.repository' +""" + +import inspect +import sys + +from astroid import MANAGER, AstroidBuildingException +from astroid.builder import AstroidBuilder + + +_inspected_modules = {} + + +def _gi_build_stub(parent): + """ + Inspect the passed module recursively and build stubs for functions, + classes, etc. + """ + classes = {} + functions = {} + constants = {} + methods = {} + for name in dir(parent): + if not name or name.startswith("__"): + # GLib.IConv has a parameter named "" :/ + continue + try: + obj = getattr(parent, name) + except: + continue + + if inspect.isclass(obj): + classes[name] = obj + elif (inspect.isfunction(obj) or + inspect.isbuiltin(obj)): + functions[name] = obj + elif (inspect.ismethod(obj) or + inspect.ismethoddescriptor(obj)): + methods[name] = obj + elif type(obj) in [int, str]: + constants[name] = obj + elif (str(obj).startswith("= (3, 0) +PY34 = sys.version_info >= (3, 4) def _init_set_doc(node, newnode): newnode.doc = None @@ -114,7 +116,11 @@ def _set_infos(oldnode, newnode, parent): newnode.col_offset = oldnode.col_offset newnode.set_line_info(newnode.last_child()) # set_line_info accepts None - +def _infer_metaclass(node): + if isinstance(node, Name): + return node.id + elif isinstance(node, Attribute): + return node.attr class TreeRebuilder(object): @@ -187,13 +193,20 @@ def visit_arguments(self, node, parent): newnode.defaults = [self.visit(child, newnode) for child in node.defaults] newnode.kwonlyargs = [] newnode.kw_defaults = [] - newnode.vararg = node.vararg - newnode.kwarg = node.kwarg + vararg, kwarg = node.vararg, node.kwarg + # change added in 82732 (7c5c678e4164), vararg and kwarg + # are instances of `_ast.arg`, not strings + if vararg and PY34: + vararg = vararg.arg + if kwarg and PY34: + kwarg = kwarg.arg + newnode.vararg 
= vararg + newnode.kwarg = kwarg # save argument names in locals: - if node.vararg: - newnode.parent.set_local(newnode.vararg, newnode) - if node.kwarg: - newnode.parent.set_local(newnode.kwarg, newnode) + if vararg: + newnode.parent.set_local(vararg, newnode) + if kwarg: + newnode.parent.set_local(kwarg, newnode) newnode.set_line_info(newnode.last_child()) return newnode @@ -245,7 +258,7 @@ def visit_assign(self, node, parent): continue elif getattr(newnode.targets[0], 'name', None) == '__metaclass__': # XXX check more... - self._metaclass[-1] = 'type' # XXX get the actual metaclass + self._metaclass[-1] = _infer_metaclass(node.value) newnode.set_line_info(newnode.last_child()) return newnode @@ -328,10 +341,13 @@ def visit_class(self, node, parent): newnode.decorators = self.visit_decorators(node, newnode) newnode.set_line_info(newnode.last_child()) metaclass = self._metaclass.pop() - if not newnode.bases: - # no base classes, detect new / style old style according to - # current scope - newnode._newstyle = metaclass == 'type' + if PY3K: + newnode._newstyle = True + else: + if not newnode.bases: + # no base classes, detect new / style old style according to + # current scope + newnode._newstyle = metaclass in ('type', 'ABCMeta') newnode.parent.frame().set_local(newnode.name, newnode) return newnode @@ -838,6 +854,12 @@ def visit_arg(self, node, parent): # XXX or we should instead introduce a Arg node in astroid ? 
return self.visit_assname(node, parent, node.arg) + def visit_nameconstant(self, node, parent): + # in Python 3.4 we have NameConstant for True / False / None + newnode = new.Const(node.value) + _set_infos(node, newnode, parent) + return newnode + def visit_arguments(self, node, parent): newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent) self.asscontext = "Ass" @@ -934,6 +956,14 @@ def visit_child(child): def visit_yieldfrom(self, node, parent): return self.visit_yield(node, parent) + def visit_class(self, node, parent): + newnode = super(TreeRebuilder3k, self).visit_class(node, parent) + for keyword in node.keywords: + if keyword.arg == 'metaclass': + newnode._metaclass = self.visit(keyword, newnode).value + break + return newnode + if sys.version_info >= (3, 0): TreeRebuilder = TreeRebuilder3k diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py index 0ee29be4..a7f6ee8c 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py @@ -990,3 +990,25 @@ class node yield iface if missing: raise InferenceError() + + _metaclass = None + def metaclass(self): + """ Return the metaclass of this class """ + if self._metaclass: + # Expects this from Py3k TreeRebuilder + try: + return next(self._metaclass.infer()) + except InferenceError: + return + + try: + meta = self.getattr('__metaclass__')[0] + except NotFoundError: + return + try: + infered = meta.infer().next() + except InferenceError: + return + if infered is YES: # don't expose this + return None + return infered diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py index 6352866d..d3be5552 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py +++ 
b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py @@ -25,7 +25,7 @@ subpackage_of = 'logilab' subpackage_master = True -numversion = (0, 60, 0) +numversion = (0, 61, 0) version = '.'.join([str(num) for num in numversion]) license = 'LGPL' # 2.1 or later diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py index c5685ec2..02e4edbb 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py @@ -80,7 +80,7 @@ def decorator(func): if '%s' in message: message %= func.func_name def wrapped(*args, **kwargs): - self.warn(version, message, stacklevel) + self.warn(version, message, stacklevel+1) return func(*args, **kwargs) return wrapped return decorator @@ -92,7 +92,7 @@ class metaclass(type): def __call__(cls, *args, **kwargs): msg = getattr(cls, "__deprecation_warning__", "%(cls)s is deprecated") % {'cls': cls.__name__} - self.warn(version, msg) + self.warn(version, msg, stacklevel=3) return type.__call__(cls, *args, **kwargs) return metaclass @@ -129,7 +129,7 @@ class DeprecatedClass(new_class): """FIXME: There might be a better way to handle old/new-style class """ def __init__(self, *args, **kwargs): - self.warn(version, message) + self.warn(version, message, stacklevel=3) new_class.__init__(self, *args, **kwargs) return DeprecatedClass diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py index 94a71b6e..d62e8c09 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py @@ -134,14 +134,14 @@ def emit_edge(self, name1, name2, **props): """ attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()] n_from, n_to = normalize_node_id(name1), normalize_node_id(name2) - self.emit('%s -> 
%s [%s];' % (n_from, n_to, ", ".join(attrs)) ) + self.emit('%s -> %s [%s];' % (n_from, n_to, ', '.join(sorted(attrs))) ) def emit_node(self, name, **props): """emit a node with given properties. node properties: see http://www.graphviz.org/doc/info/attrs.html """ attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()] - self.emit('%s [%s];' % (normalize_node_id(name), ", ".join(attrs))) + self.emit('%s [%s];' % (normalize_node_id(name), ', '.join(sorted(attrs)))) def normalize_node_id(nid): """Returns a suitable DOT node id for `nid`.""" diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py index 9d0bb495..27568412 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py @@ -27,6 +27,8 @@ :type BUILTIN_MODULES: dict :var BUILTIN_MODULES: dictionary with builtin module names has key """ +from __future__ import with_statement + __docformat__ = "restructuredtext en" import sys @@ -656,14 +658,19 @@ def _module_file(modpath, path=None): '.'.join(imported))) # XXX guess if package is using pkgutil.extend_path by looking for # those keywords in the first four Kbytes - data = open(join(mp_filename, '__init__.py')).read(4096) - if 'pkgutil' in data and 'extend_path' in data: - # extend_path is called, search sys.path for module/packages of this name - # see pkgutil.extend_path documentation - path = [join(p, modname) for p in sys.path - if isdir(join(p, modname))] - else: + try: + with open(join(mp_filename, '__init__.py')) as stream: + data = stream.read(4096) + except IOError: path = [mp_filename] + else: + if 'pkgutil' in data and 'extend_path' in data: + # extend_path is called, search sys.path for module/packages + # of this name see pkgutil.extend_path documentation + path = [join(p, *imported) for p in sys.path + if isdir(join(p, *imported))] + else: + path = [mp_filename] 
return mtype, mp_filename def _is_python_file(filename): diff --git a/pymode/libs/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py index 8dcd8df7..131392d3 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/main.py +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py @@ -1,11 +1,11 @@ """ Pylint support. """ from os import path as op, environ -from pylama.lint import Linter as BaseLinter # noqa +from pylama.lint import Linter as BaseLinter -from astroid import MANAGER # noqa -from pylint.lint import Run # noqa -from pylint.reporters import BaseReporter # noqa +from astroid import MANAGER +from pylint.lint import Run +from pylint.reporters import BaseReporter PYLINT_RC = op.abspath(op.join(op.dirname(__file__), 'pylint.rc')) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py index dfb4386b..eed1b62f 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import sys def run_pylint(): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py index 614828ee..d0ad387c 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py @@ -13,15 +13,15 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""pylint packaging information""" modname = distname = 'pylint' -numversion = (1, 0, 0) +numversion = (1, 1, 0) version = '.'.join([str(num) for num in numversion]) -install_requires = ['logilab-common >= 0.53.0', 'astroid >= 0.24.3'] +install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.0.1'] license = 'GPL' description = "python code static checker" diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py index 27dc3645..c68f2a8f 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """utilities methods and classes for checkers Base id of standard checkers (used in msg and report ids): @@ -99,10 +99,6 @@ def add_message(self, msg_id, line=None, node=None, args=None): """add a message of a given type""" self.linter.add_message(msg_id, line, node, args) - def package_dir(self): - """return the base directory for the analysed package""" - return dirname(self.linter.base_file) - # dummy methods implementing the IChecker interface def open(self): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py index de40c4cb..11198acb 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py @@ -13,13 +13,13 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """basic checker for Python code""" import sys import astroid from logilab.common.ureports import Table -from astroid import are_exclusive +from astroid import are_exclusive, InferenceError import astroid.bases from pylint.interfaces import IAstroidChecker @@ -33,6 +33,8 @@ is_inside_except, overrides_a_method, safe_infer, + get_argument_from_call, + NoSuchArgumentError, ) @@ -47,6 +49,14 @@ CLASS_ATTRIBUTE_RGX = re.compile(r'([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$') # do not require a doc string on system methods NO_REQUIRED_DOC_RGX = re.compile('__.*__') +REVERSED_METHODS = (('__getitem__', '__len__'), + ('__reversed__', )) + +PY33 = sys.version_info >= (3, 3) +BAD_FUNCTIONS = ['map', 'filter', 'apply'] +if sys.version_info < (3, 0): + BAD_FUNCTIONS.append('input') + BAD_FUNCTIONS.append('file') del re @@ -79,12 +89,21 @@ def _loop_exits_early(loop): # in orelse. for child in loop.body: if isinstance(child, loop_nodes): + # break statement may be in orelse of child loop. + for orelse in (child.orelse or ()): + for _ in orelse.nodes_of_class(astroid.Break, skip_klass=loop_nodes): + return True continue for _ in child.nodes_of_class(astroid.Break, skip_klass=loop_nodes): return True return False - +if sys.version_info < (3, 0): + PROPERTY_CLASSES = set(('__builtin__.property', 'abc.abstractproperty')) +else: + PROPERTY_CLASSES = set(('builtins.property', 'abc.abstractproperty')) +ABC_METHODS = set(('abc.abstractproperty', 'abc.abstractmethod', + 'abc.abstractclassmethod', 'abc.abstractstaticmethod')) def _determine_function_name_type(node): """Determine the name type whose regex the a function's name should match. 
@@ -105,8 +124,7 @@ def _determine_function_name_type(node): (isinstance(decorator, astroid.Getattr) and decorator.attrname == 'abstractproperty')): infered = safe_infer(decorator) - if (infered and - infered.qname() in ('__builtin__.property', 'abc.abstractproperty')): + if infered and infered.qname() in PROPERTY_CLASSES: return 'attr' # If the function is decorated using the prop_method.{setter,getter} # form, treat it like an attribute as well. @@ -115,6 +133,25 @@ def _determine_function_name_type(node): return 'attr' return 'method' +def decorated_with_abc(func): + """ Determine if the `func` node is decorated + with `abc` decorators (abstractmethod et co.) + """ + if func.decorators: + for node in func.decorators.nodes: + try: + infered = node.infer().next() + except InferenceError: + continue + if infered and infered.qname() in ABC_METHODS: + return True + +def has_abstract_methods(node): + """ Determine if the given `node` has + abstract methods, defined with `abc` module. + """ + return any(decorated_with_abc(meth) + for meth in node.mymethods()) def report_by_type_stats(sect, stats, old_stats): """make a report of @@ -205,7 +242,8 @@ class BasicErrorChecker(_BasicChecker): 'return-arg-in-generator', 'Used when a "return" statement with an argument is found ' 'outside in a generator function or method (e.g. 
with some ' - '"yield" statements).'), + '"yield" statements).', + {'maxversion': (3, 3)}), 'E0107': ("Use of the non-existent %s operator", 'nonexistent-operator', "Used when you attempt to use the C-style pre-increment or" @@ -214,6 +252,11 @@ class BasicErrorChecker(_BasicChecker): 'duplicate-argument-name', 'Duplicate argument names in function definitions are syntax' ' errors.'), + 'E0110': ('Abstract class with abstract methods instantiated', + 'abstract-class-instantiated', + 'Used when an abstract class with `abc.ABCMeta` as metaclass ' + 'has abstract methods and is instantiated.', + {'minversion': (3, 0)}), 'W0120': ('Else clause on loop without a break statement', 'useless-else-on-loop', 'Loops should only have an else clause if they can exit early ' @@ -247,15 +290,16 @@ def visit_function(self, node): not (v is None or (isinstance(v, astroid.Const) and v.value is None) or (isinstance(v, astroid.Name) and v.name == 'None') - ) ]: + )]: self.add_message('return-in-init', node=node) elif node.is_generator(): # make sure we don't mix non-None returns and yields - for retnode in returns: - if isinstance(retnode.value, astroid.Const) and \ - retnode.value.value is not None: - self.add_message('return-arg-in-generator', node=node, - line=retnode.fromlineno) + if not PY33: + for retnode in returns: + if isinstance(retnode.value, astroid.Const) and \ + retnode.value.value is not None: + self.add_message('return-arg-in-generator', node=node, + line=retnode.fromlineno) # Check for duplicate names args = set() for name in node.argnames(): @@ -299,6 +343,34 @@ def visit_unaryop(self, node): (node.operand.op == node.op)): self.add_message('nonexistent-operator', node=node, args=node.op*2) + @check_messages('abstract-class-instantiated') + def visit_callfunc(self, node): + """ Check instantiating abstract class with + abc.ABCMeta as metaclass. 
+ """ + try: + infered = node.func.infer().next() + except astroid.InferenceError: + return + if not isinstance(infered, astroid.Class): + return + # __init__ was called + metaclass = infered.metaclass() + if metaclass is None: + # Python 3.4 has `abc.ABC`, which won't be detected + # by ClassNode.metaclass() + for ancestor in infered.ancestors(): + if (ancestor.qname() == 'abc.ABC' and + has_abstract_methods(infered)): + + self.add_message('abstract-class-instantiated', node=node) + break + return + if (metaclass.qname() == 'abc.ABCMeta' and + has_abstract_methods(infered)): + + self.add_message('abstract-class-instantiated', node=node) + def _check_else_on_loop(self, node): """Check that any loop with an else clause has a break statement.""" if node.orelse and not _loop_exits_early(node): @@ -409,6 +481,16 @@ class BasicChecker(_BasicChecker): 'C0121': ('Missing required attribute "%s"', # W0103 'missing-module-attribute', 'Used when an attribute required for modules is missing.'), + + 'E0109': ('Missing argument to reversed()', + 'missing-reversed-argument', + 'Used when reversed() builtin didn\'t receive an argument.'), + 'E0111': ('The first reversed() argument is not a sequence', + 'bad-reversed-sequence', + 'Used when the first argument to reversed() builtin ' + 'isn\'t a sequence (does not implement __reversed__, ' + 'nor __getitem__ and __len__'), + } options = (('required-attributes', @@ -418,13 +500,13 @@ class BasicChecker(_BasicChecker): 'comma'} ), ('bad-functions', - {'default' : ('map', 'filter', 'apply', 'input'), + {'default' : BAD_FUNCTIONS, 'type' :'csv', 'metavar' : '', 'help' : 'List of builtins function names that should not be ' 'used, separated by a comma'} ), ) - reports = ( ('RP0101', 'Statistics by type', report_by_type_stats), ) + reports = (('RP0101', 'Statistics by type', report_by_type_stats),) def __init__(self, linter): _BasicChecker.__init__(self, linter) @@ -598,7 +680,9 @@ def visit_exec(self, node): """just print a warning on 
exec statements""" self.add_message('exec-used', node=node) - @check_messages('bad-builtin', 'star-args', 'exec-used') + @check_messages('bad-builtin', 'star-args', + 'exec-used', 'missing-reversed-argument', + 'bad-reversed-sequence') def visit_callfunc(self, node): """visit a CallFunc node -> check if this is not a blacklisted builtin call and check for * or ** use @@ -611,6 +695,8 @@ def visit_callfunc(self, node): name in node.root()): if name == 'exec': self.add_message('exec-used', node=node) + elif name == 'reversed': + self._check_reversed(node) if name in self.config.bad_functions: self.add_message('bad-builtin', node=node, args=name) if node.starargs or node.kwargs: @@ -675,7 +761,55 @@ def _check_not_in_finally(self, node, node_name, breaker_classes=()): return _node = _parent _parent = _node.parent + + def _check_reversed(self, node): + """ check that the argument to `reversed` is a sequence """ + try: + argument = safe_infer(get_argument_from_call(node, position=0)) + except NoSuchArgumentError: + self.add_message('missing-reversed-argument', node=node) + else: + if argument is astroid.YES: + return + if argument is None: + # nothing was infered + # try to see if we have iter() + if (isinstance(node.args[0], astroid.CallFunc) and + node.args[0].func.name == 'iter'): + func = node.args[0].func.infer().next() + if is_builtin_object(func): + self.add_message('bad-reversed-sequence', node=node) + return + if isinstance(argument, astroid.Instance): + if (argument._proxied.name == 'dict' and + is_builtin_object(argument._proxied)): + self.add_message('bad-reversed-sequence', node=node) + return + elif any(ancestor.name == 'dict' and is_builtin_object(ancestor) + for ancestor in argument._proxied.ancestors()): + # mappings aren't accepted by reversed() + self.add_message('bad-reversed-sequence', node=node) + return + + for methods in REVERSED_METHODS: + for meth in methods: + try: + argument.getattr(meth) + except astroid.NotFoundError: + break + else: + 
break + else: + # check if it is a .deque. It doesn't seem that + # we can retrieve special methods + # from C implemented constructs + if argument._proxied.qname().endswith(".deque"): + return + self.add_message('bad-reversed-sequence', node=node) + elif not isinstance(argument, (astroid.List, astroid.Tuple)): + # everything else is not a proper sequence for reversed() + self.add_message('bad-reversed-sequence', node=node) class NameChecker(_BasicChecker): msgs = { diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py index fd761463..3f0a22d3 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py @@ -1,4 +1,4 @@ -# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""classes checker for Python code """ from __future__ import generators @@ -183,7 +183,7 @@ class ClassChecker(BaseChecker): options = (('ignore-iface-methods', {'default' : (#zope interface 'isImplementedBy', 'deferred', 'extends', 'names', - 'namesAndDescriptions', 'queryDescriptionFor', 'getBases', + 'namesAndDescriptions', 'queryDescriptionFor', 'getBases', 'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue', 'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue', 'isImplementedByInstancesOf', @@ -355,10 +355,10 @@ def _check_exit(self, node): positional = sum(1 for arg in node.args.args if arg.name != 'self') if positional < 3 and not node.args.vararg: self.add_message('bad-context-manager', - node=node) + node=node) elif positional > 3: self.add_message('bad-context-manager', - node=node) + node=node) def leave_function(self, node): """on method node, check if this method couldn't be a function diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py index f3b58821..cfd2d808 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""check for signs of poor design""" from astroid import Function, If, InferenceError @@ -26,42 +26,6 @@ # regexp for ignored argument name IGNORED_ARGUMENT_NAMES = re.compile('_.*') -SPECIAL_METHODS = [('Context manager', set(('__enter__', - '__exit__',))), - ('Container', set(('__len__', - '__getitem__',))), - ('Mutable container', set(('__setitem__', - '__delitem__',))), - ] - -class SpecialMethodChecker(object): - """A functor that checks for consistency of a set of special methods""" - def __init__(self, methods_found, on_error): - """Stores the set of __x__ method names that were found in the - class and a callable that will be called with args to R0024 if - the check fails - """ - self.methods_found = methods_found - self.on_error = on_error - - def __call__(self, methods_required, protocol): - """Checks the set of method names given to __init__ against the set - required. - - If they are all present, returns true. - If they are all absent, returns false. - If some are present, reports the error and returns false. 
- """ - required_methods_found = methods_required & self.methods_found - if required_methods_found == methods_required: - return True - if required_methods_found: - required_methods_missing = methods_required - self.methods_found - self.on_error((protocol, - ', '.join(sorted(required_methods_found)), - ', '.join(sorted(required_methods_missing)))) - return False - def class_is_abstract(klass): """return true if the given class node should be considered as an abstract @@ -121,10 +85,6 @@ def class_is_abstract(klass): 'R0923': ('Interface not implemented', 'interface-not-implemented', 'Used when an interface class is not implemented anywhere.'), - 'R0924': ('Badly implemented %s, implements %s but not %s', - 'incomplete-protocol', - 'A class implements some of the special methods for a particular \ - protocol, but not all of them') } @@ -289,13 +249,6 @@ def leave_class(self, node): # stop here for exception, metaclass and interface classes if node.type != 'class': return - # Does the class implement special methods consitently? - # If so, don't enforce minimum public methods. - check_special = SpecialMethodChecker( - special_methods, lambda args: self.add_message('R0924', node=node, args=args)) - protocols = [check_special(pmethods, pname) for pname, pmethods in SPECIAL_METHODS] - if True in protocols: - return # Does the class contain more than 5 public methods ? 
if nb_public_methods < self.config.min_public_methods: self.add_message('R0903', node=node, @@ -379,7 +332,7 @@ def visit_if(self, node): """increments the branches counter""" branches = 1 # don't double count If nodes coming from some 'elif' - if node.orelse and (len(node.orelse)>1 or + if node.orelse and (len(node.orelse) > 1 or not isinstance(node.orelse[0], If)): branches += 1 self._inc_branch(branches) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py index 8ac00a5f..f85deb73 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py @@ -11,7 +11,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """exceptions handling (raising, catching, exceptions classes) checker """ import sys @@ -25,7 +25,25 @@ from pylint.checkers.utils import is_empty, is_raising, check_messages from pylint.interfaces import IAstroidChecker +def infer_bases(klass): + """ Fully infer the bases of the klass node. + This doesn't use .ancestors(), because we need + the non-inferable nodes (YES nodes), + which can't be retrieved from .ancestors() + """ + for base in klass.bases: + try: + inferit = base.infer().next() + except astroid.InferenceError: + continue + if inferit is YES: + yield inferit + else: + for base in infer_bases(inferit): + yield base + +PY3K = sys.version_info >= (3, 0) OVERGENERAL_EXCEPTIONS = ('Exception',) MSGS = { @@ -38,6 +56,13 @@ 'raising-bad-type', 'Used when something which is neither a class, an instance or a \ string is raised (i.e. 
a `TypeError` will be raised).'), + 'E0703': ('Exception context set to something which is not an ' + 'exception, nor None', + 'bad-exception-context', + 'Used when using the syntax "raise ... from ...", ' + 'where the exception context is not an exception, ' + 'nor None.', + {'minversion': (3, 0)}), 'E0710': ('Raising a new style class which doesn\'t inherit from BaseException', 'raising-non-exception', 'Used when a new style class which doesn\'t inherit from \ @@ -50,7 +75,7 @@ 'catching-non-exception', 'Used when a class which doesn\'t inherit from \ BaseException is used as an exception in an except clause.'), - + 'W0701': ('Raising a string exception', 'raising-string', 'Used when a string exception is raised.'), @@ -109,12 +134,27 @@ class ExceptionsChecker(BaseChecker): ), ) - @check_messages('W0701', 'W0710', 'E0702', 'E0710', 'E0711') + @check_messages('W0701', 'W0710', 'E0702', 'E0710', 'E0711', + 'bad-exception-context') def visit_raise(self, node): """visit raise possibly inferring value""" # ignore empty raise if node.exc is None: return + if PY3K and node.cause: + try: + cause = node.cause.infer().next() + except astroid.InferenceError: + pass + else: + if isinstance(cause, astroid.Const): + if cause.value is not None: + self.add_message('bad-exception-context', + node=node) + elif (not isinstance(cause, astroid.Class) and + not inherit_from_std_ex(cause)): + self.add_message('bad-exception-context', + node=node) expr = node.exc if self._check_raise_value(node, expr): return @@ -141,10 +181,10 @@ def _check_raise_value(self, node, expr): isinstance(expr, (astroid.List, astroid.Dict, astroid.Tuple, astroid.Module, astroid.Function)): self.add_message('E0702', node=node, args=expr.name) - elif ( (isinstance(expr, astroid.Name) and expr.name == 'NotImplemented') - or (isinstance(expr, astroid.CallFunc) and - isinstance(expr.func, astroid.Name) and - expr.func.name == 'NotImplemented') ): + elif ((isinstance(expr, astroid.Name) and expr.name == 
'NotImplemented') + or (isinstance(expr, astroid.CallFunc) and + isinstance(expr.func, astroid.Name) and + expr.func.name == 'NotImplemented')): self.add_message('E0711', node=node) elif isinstance(expr, astroid.BinOp) and expr.op == '%': self.add_message('W0701', node=node) @@ -211,12 +251,19 @@ def visit_tryexcept(self, node): and exc.root().name == EXCEPTIONS_MODULE and nb_handlers == 1 and not is_raising(handler.body)): self.add_message('W0703', args=exc.name, node=handler.type) - + if (not inherit_from_std_ex(exc) and exc.root().name != BUILTINS_NAME): - self.add_message('catching-non-exception', - node=handler.type, - args=(exc.name, )) + # try to see if the exception is based on a C based + # exception, by infering all the base classes and + # looking for inference errors + bases = infer_bases(exc) + fully_infered = all(inferit is not YES + for inferit in bases) + if fully_infered: + self.add_message('catching-non-exception', + node=handler.type, + args=(exc.name, )) exceptions_classes += excs diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py index f307f33d..e498af75 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py @@ -11,7 +11,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Python code format's checker. 
By default try to follow Guido's style guide : @@ -36,7 +36,9 @@ from pylint.utils import WarningScope, OPTION_RGX _KEYWORD_TOKENS = ['assert', 'del', 'elif', 'except', 'for', 'if', 'in', 'not', - 'print', 'raise', 'return', 'while', 'yield'] + 'raise', 'return', 'while', 'yield'] +if sys.version_info < (3, 0): + _KEYWORD_TOKENS.append('print') _SPACED_OPERATORS = ['==', '<', '>', '!=', '<>', '<=', '>=', '+=', '-=', '*=', '**=', '/=', '//=', '&=', '|=', '^=', @@ -212,7 +214,6 @@ def _check_keyword_parentheses(self, tokens, start): if tokens[start+1][1] != '(': return - found_comma = False found_and_or = False depth = 0 keyword_token = tokens[start][1] @@ -300,7 +301,7 @@ def _check_equals_spacing(self, tokens, i): else: self._check_space(tokens, i, (_MUST, _MUST)) - def _open_lambda(self, unused_tokens, unused_i): + def _open_lambda(self, tokens, i): # pylint:disable=unused-argument self._bracket_stack.append('lambda') def _handle_colon(self, tokens, i): @@ -355,7 +356,6 @@ def _name_construct(token): pairs = [(tokens[i-1], tokens[i]), (tokens[i], tokens[i+1])] for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)): - current_idx = 1 - other_idx if token_pair[other_idx][0] in _EOL or policy == _IGNORE: continue @@ -501,7 +501,7 @@ def process_tokens(self, tokens): if line_num > self.config.max_module_lines: self.add_message('C0302', args=line_num, line=1) - @check_messages('C0321' ,'C03232', 'C0323', 'C0324') + @check_messages('C0321', 'C03232', 'C0323', 'C0324') def visit_default(self, node): """check the node line number and check it if not yet done""" if not node.is_statement: @@ -606,7 +606,7 @@ def check_indent_level(self, string, expected, line_num): self.add_message('W0312', args=args, line=line_num) return level suppl += string[0] - string = string [1:] + string = string[1:] if level != expected or suppl: i_type = 'spaces' if indent[0] == '\t': diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py 
b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py index 1dd77879..df4304a4 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """imports checkers for Python code""" from logilab.common.graph import get_cycles, DotBackend @@ -93,7 +93,7 @@ def dependencies_graph(filename, dep_info): """write dependencies as a dot (graphviz) file """ done = {} - printer = DotBackend(filename[:-4], rankdir = "LR") + printer = DotBackend(filename[:-4], rankdir='LR') printer.emit('URL="." node[shape="box"]') for modname, dependencies in sorted(dep_info.iteritems()): done[modname] = 1 @@ -301,11 +301,10 @@ def _add_imported_module(self, node, importedmodname): importedmodname, set()) if not context_name in importedmodnames: importedmodnames.add(context_name) - if is_standard_module(importedmodname, (self.package_dir(),)): - # update import graph - mgraph = self.import_graph.setdefault(context_name, set()) - if not importedmodname in mgraph: - mgraph.add(importedmodname) + # update import graph + mgraph = self.import_graph.setdefault(context_name, set()) + if not importedmodname in mgraph: + mgraph.add(importedmodname) def _check_deprecated_module(self, node, mod_path): """check if the module is deprecated""" diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py index 6986ca4e..a6b0145d 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py @@ -10,7 +10,7 @@ # # You should have received a copy of the GNU 
General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """checker for use of Python logging """ @@ -91,7 +91,7 @@ def visit_callfunc(self, node): and ancestor.parent.name == 'logging')))] except astroid.exceptions.InferenceError: return - if (node.func.expr.name != self._logging_name and not logger_class): + if node.func.expr.name != self._logging_name and not logger_class: return self._check_convenience_methods(node) self._check_log_methods(node) @@ -129,7 +129,7 @@ def _check_format_string(self, node, format_arg): node: AST node to be checked. format_arg: Index of the format string in the node arguments. """ - num_args = self._count_supplied_tokens(node.args[format_arg + 1:]) + num_args = _count_supplied_tokens(node.args[format_arg + 1:]) if not num_args: # If no args were supplied, then all format strings are valid - # don't check any further. @@ -147,9 +147,10 @@ def _check_format_string(self, node, format_arg): # Keyword checking on logging strings is complicated by # special keywords - out of scope. return - except utils.UnsupportedFormatCharacter, e: - c = format_string[e.index] - self.add_message('E1200', node=node, args=(c, ord(c), e.index)) + except utils.UnsupportedFormatCharacter, ex: + char = format_string[ex.index] + self.add_message('E1200', node=node, + args=(char, ord(char), ex.index)) return except utils.IncompleteFormatString: self.add_message('E1201', node=node) @@ -159,20 +160,21 @@ def _check_format_string(self, node, format_arg): elif num_args < required_num_args: self.add_message('E1206', node=node) - def _count_supplied_tokens(self, args): - """Counts the number of tokens in an args list. - The Python log functions allow for special keyword arguments: func, - exc_info and extra. To handle these cases correctly, we only count - arguments that aren't keywords. 
+def _count_supplied_tokens(args): + """Counts the number of tokens in an args list. - Args: - args: List of AST nodes that are arguments for a log format string. + The Python log functions allow for special keyword arguments: func, + exc_info and extra. To handle these cases correctly, we only count + arguments that aren't keywords. - Returns: - Number of AST nodes that aren't keywords. - """ - return sum(1 for arg in args if not isinstance(arg, astroid.Keyword)) + Args: + args: List of AST nodes that are arguments for a log format string. + + Returns: + Number of AST nodes that aren't keywords. + """ + return sum(1 for arg in args if not isinstance(arg, astroid.Keyword)) def register(linter): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py index 69959090..9c49825e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py @@ -10,7 +10,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE). http://www.logilab.fr/ -- mailto:contact@logilab.fr diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py index 98321954..027d512f 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py @@ -1,4 +1,4 @@ -# Copyright (c) 2005-2006 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2005-2013 LOGILAB S.A. (Paris, FRANCE). 
# http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """check for new / old style related problems """ import sys @@ -37,7 +37,8 @@ 'E1004': ('Missing argument to super()', 'missing-super-argument', 'Used when the super builtin didn\'t receive an \ - argument on Python 2'), + argument on Python 2', + {'maxversion': (3, 0)}), 'W1001': ('Use of "property" on an old style class', 'property-on-old-class', 'Used when PyLint detect the use of the builtin "property" \ diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py index a8e4367c..71fecf68 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). 
http://www.logilab.fr/ -- mailto:contact@logilab.fr @@ -68,11 +68,11 @@ class RawMetricsChecker(BaseTokenChecker): # configuration section name name = 'metrics' # configuration options - options = ( ) + options = () # messages msgs = {} # reports - reports = ( ('RP0701', 'Raw metrics', report_raw_stats), ) + reports = (('RP0701', 'Raw metrics', report_raw_stats),) def __init__(self, linter): BaseTokenChecker.__init__(self, linter) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py index 26b37255..cf671bf6 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py @@ -13,7 +13,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""a similarities / code duplication command line tool and pylint checker """ import sys @@ -63,15 +63,15 @@ def _compute_sims(self): duplicate = no_duplicates.setdefault(num, []) for couples in duplicate: if (lineset1, idx1) in couples or (lineset2, idx2) in couples: - couples.add( (lineset1, idx1) ) - couples.add( (lineset2, idx2) ) + couples.add((lineset1, idx1)) + couples.add((lineset2, idx2)) break else: - duplicate.append( set([(lineset1, idx1), (lineset2, idx2)]) ) + duplicate.append(set([(lineset1, idx1), (lineset2, idx2)])) sims = [] for num, ensembles in no_duplicates.iteritems(): for couples in ensembles: - sims.append( (num, couples) ) + sims.append((num, couples)) sims.sort() sims.reverse() return sims @@ -104,7 +104,7 @@ def _find_common(self, lineset1, lineset2): while index1 < len(lineset1): skip = 1 num = 0 - for index2 in find( lineset1[index1] ): + for index2 in find(lineset1[index1]): non_blank = 0 for num, ((_, line1), (_, line2)) in enumerate( izip(lines1(index1), lines2(index2))): @@ -210,7 +210,7 @@ def _mk_index(self): index = {} for line_no, line in enumerate(self._stripped_lines): if line: - index.setdefault(line, []).append( line_no ) + index.setdefault(line, []).append(line_no) return index @@ -260,7 +260,7 @@ class SimilarChecker(BaseChecker, Similar): ), ) # reports - reports = ( ('RP0801', 'Duplication', report_similarities), ) + reports = (('RP0801', 'Duplication', report_similarities),) def __init__(self, linter=None): BaseChecker.__init__(self, linter) @@ -349,9 +349,9 @@ def Run(argv=None): usage() elif opt in ('-i', '--ignore-comments'): ignore_comments = True - elif opt in ('--ignore-docstrings'): + elif opt in ('--ignore-docstrings',): ignore_docstrings = True - elif opt in ('--ignore-imports'): + elif opt in ('--ignore-imports',): ignore_imports = True if not args: usage(1) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py index 
07e1fbe1..8cb78f4d 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Checkers for various standard library functions.""" import re @@ -21,7 +21,7 @@ import astroid from pylint.interfaces import IAstroidChecker -from pylint.checkers import BaseChecker, BaseTokenChecker +from pylint.checkers import BaseChecker from pylint.checkers import utils _VALID_OPEN_MODE_REGEX = r'^(r?U|[rwa]\+?b?)$' diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py index 42563da7..b905d280 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py @@ -14,7 +14,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Checker for string formatting operations. 
""" @@ -66,11 +66,11 @@ 'E1305': ("Too many arguments for format string", "too-many-format-args", "Used when a format string that uses unnamed conversion \ - specifiers is given too few arguments."), + specifiers is given too many arguments."), 'E1306': ("Not enough arguments for format string", "too-few-format-args", "Used when a format string that uses unnamed conversion \ - specifiers is given too many arguments"), + specifiers is given too few arguments"), } OTHER_NODES = (astroid.Const, astroid.List, astroid.Backquote, @@ -233,7 +233,7 @@ def process_string_token(self, token, start_row, start_col): if c in '\'\"': quote_char = c break - prefix = token[:i].lower() # markers like u, b, r. + prefix = token[:i].lower() # markers like u, b, r. after_prefix = token[i:] if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char: string_body = after_prefix[3:-3] diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py index 69883592..5e9ae1b0 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py @@ -1,4 +1,4 @@ -# Copyright (c) 2006-2010 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2006-2013 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """try to find more bugs in the code using astroid inference capabilities """ @@ -292,7 +292,7 @@ def visit_callfunc(self, node): # Built-in functions have no argument information. 
return - if len( called.argnames() ) != len( set( called.argnames() ) ): + if len(called.argnames()) != len(set(called.argnames())): # Duplicate parameter name (see E9801). We can't really make sense # of the function call in this case, so just return. return diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py index 72a9733d..78deb4e7 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py @@ -14,7 +14,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """some functions that may be useful for various checkers """ @@ -69,8 +69,9 @@ def clobber_in_except(node): scope, stmts = node.lookup(name) if (stmts and not isinstance(stmts[0].ass_type(), - (astroid.Assign, astroid.AugAssign, astroid.ExceptHandler))): - return (True, (name, 'outer scope (line %s)' % (stmts[0].fromlineno,))) + (astroid.Assign, astroid.AugAssign, + astroid.ExceptHandler))): + return (True, (name, 'outer scope (line %s)' % stmts[0].fromlineno)) return (False, None) @@ -123,7 +124,7 @@ def is_empty(body): def is_builtin_object(node): """Returns True if the given node is an object from the __builtin__ module.""" - return node and node.root().name == '__builtin__' + return node and node.root().name == BUILTINS_NAME def is_builtin(name): # was is_native_builtin """return true if could be considered as a builtin defined by python @@ -153,8 +154,10 @@ def is_defined_before(var_node): elif isinstance(_node, astroid.With): for expr, vars in _node.items: if expr.parent_of(var_node): - break - if vars and vars.name == varname: + break + if (vars and + isinstance(vars, astroid.AssName) and + vars.name == 
varname): return True elif isinstance(_node, (astroid.Lambda, astroid.Function)): if _node.args.is_argument(varname): @@ -164,7 +167,7 @@ def is_defined_before(var_node): break elif isinstance(_node, astroid.ExceptHandler): if isinstance(_node.name, astroid.AssName): - ass_node=_node.name + ass_node = _node.name if ass_node.name == varname: return True _node = _node.parent @@ -176,7 +179,7 @@ def is_defined_before(var_node): for ass_node in _node.nodes_of_class(astroid.AssName): if ass_node.name == varname: return True - for imp_node in _node.nodes_of_class( (astroid.From, astroid.Import)): + for imp_node in _node.nodes_of_class((astroid.From, astroid.Import)): if varname in [name[1] or name[0] for name in imp_node.names]: return True _node = _node.previous_sibling() @@ -301,52 +304,52 @@ def next_char(i): return (i, format_string[i]) i = 0 while i < len(format_string): - c = format_string[i] - if c == '%': - i, c = next_char(i) + char = format_string[i] + if char == '%': + i, char = next_char(i) # Parse the mapping key (optional). key = None - if c == '(': + if char == '(': depth = 1 - i, c = next_char(i) + i, char = next_char(i) key_start = i while depth != 0: - if c == '(': + if char == '(': depth += 1 - elif c == ')': + elif char == ')': depth -= 1 - i, c = next_char(i) + i, char = next_char(i) key_end = i - 1 key = format_string[key_start:key_end] # Parse the conversion flags (optional). - while c in '#0- +': - i, c = next_char(i) + while char in '#0- +': + i, char = next_char(i) # Parse the minimum field width (optional). - if c == '*': + if char == '*': num_args += 1 - i, c = next_char(i) + i, char = next_char(i) else: - while c in string.digits: - i, c = next_char(i) + while char in string.digits: + i, char = next_char(i) # Parse the precision (optional). 
- if c == '.': - i, c = next_char(i) - if c == '*': + if char == '.': + i, char = next_char(i) + if char == '*': num_args += 1 - i, c = next_char(i) + i, char = next_char(i) else: - while c in string.digits: - i, c = next_char(i) + while char in string.digits: + i, char = next_char(i) # Parse the length modifier (optional). - if c in 'hlL': - i, c = next_char(i) + if char in 'hlL': + i, char = next_char(i) # Parse the conversion type (mandatory). - if c not in 'diouxXeEfFgGcrs%': + if char not in 'diouxXeEfFgGcrs%': raise UnsupportedFormatCharacter(i) if key: keys.add(key) - elif c != '%': + elif char != '%': num_args += 1 i += 1 return keys, num_args diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py index 7f4ff1be..cbb14317 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py @@ -12,16 +12,18 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""variables checkers for Python code """ - +import os import sys from copy import copy import astroid from astroid import are_exclusive, builtin_lookup, AstroidBuildingException +from logilab.common.modutils import file_from_modpath + from pylint.interfaces import IAstroidChecker from pylint.checkers import BaseChecker from pylint.checkers.utils import (PYMETHODS, is_ancestor_name, is_builtin, @@ -51,6 +53,20 @@ def overridden_method(klass, name): return meth_node return None +def _get_unpacking_extra_info(node, infered): + """return extra information to add to the message for unpacking-non-sequence + and unbalanced-tuple-unpacking errors + """ + more = '' + infered_module = infered.root().name + if node.root().name == infered_module: + if node.lineno == infered.lineno: + more = ' %s' % infered.as_string() + elif infered.lineno: + more = ' defined at line %s' % infered.lineno + elif infered.lineno: + more = ' defined at line %s of %s' % (infered.lineno, infered_module) + return more MSGS = { 'E0601': ('Using variable %r before assignment', @@ -120,13 +136,12 @@ def overridden_method(klass, name): the loop.'), 'W0632': ('Possible unbalanced tuple unpacking with ' - 'sequence at line %s: ' + 'sequence%s: ' 'left side has %d label(s), right side has %d value(s)', 'unbalanced-tuple-unpacking', 'Used when there is an unbalanced tuple unpacking in assignment'), - 'W0633': ('Attempting to unpack a non-sequence with ' - 'non-sequence at line %s', + 'W0633': ('Attempting to unpack a non-sequence%s', 'unpacking-non-sequence', 'Used when something which is not ' 'a sequence is used in an unpack assignment'), @@ -204,7 +219,25 @@ def leave_module(self, node): del not_consumed[elt_name] continue if elt_name not in node.locals: - self.add_message('E0603', args=elt_name, node=elt) + if not node.package: + self.add_message('undefined-all-variable', + args=elt_name, + node=elt) + else: + basename = os.path.splitext(node.file)[0] + if os.path.basename(basename) == '__init__': + 
name = node.name + "." + elt_name + try: + file_from_modpath(name.split(".")) + except ImportError: + self.add_message('undefined-all-variable', + args=elt_name, + node=elt) + except SyntaxError as exc: + # don't yield an syntax-error warning, + # because it will be later yielded + # when the file will be checked + pass # don't check unused imports in __init__ files if not self.config.init_import and node.package: return @@ -494,6 +527,12 @@ def visit_name(self, node): # defined in global or builtin scope if defframe.root().lookup(name)[1]: maybee0601 = False + else: + # check if we have a nonlocal + if name in defframe.locals: + maybee0601 = not any(isinstance(child, astroid.Nonlocal) + and name in child.names + for child in defframe.get_children()) if (maybee0601 and stmt.fromlineno <= defstmt.fromlineno and not is_defined_before(node) @@ -572,41 +611,30 @@ def _check_unpacking(self, infered, node, targets): """ Check for unbalanced tuple unpacking and unpacking non sequences. """ + if infered is astroid.YES: + return if isinstance(infered, (astroid.Tuple, astroid.List)): + # attempt to check unpacking is properly balanced values = infered.itered() if len(targets) != len(values): - if node.root().name == infered.root().name: - location = infered.lineno or 'unknown' - else: - location = '%s (%s)' % (infered.lineno or 'unknown', - infered.root().name) - - self.add_message('unbalanced-tuple-unpacking', - node=node, - args=(location, + self.add_message('unbalanced-tuple-unpacking', node=node, + args=(_get_unpacking_extra_info(node, infered), len(targets), len(values))) - else: - if infered is astroid.YES: - return - + # attempt to check unpacking may be possible (ie RHS is iterable) + elif isinstance(infered, astroid.Instance): for meth in ('__iter__', '__getitem__'): try: infered.getattr(meth) + break except astroid.NotFoundError: continue - else: - break else: - if node.root().name == infered.root().name: - location = infered.lineno or 'unknown' - else: - location 
= '%s (%s)' % (infered.lineno or 'unknown', - infered.root().name) - - self.add_message('unpacking-non-sequence', - node=node, - args=(location, )) + self.add_message('unpacking-non-sequence', node=node, + args=(_get_unpacking_extra_info(node, infered),)) + else: + self.add_message('unpacking-non-sequence', node=node, + args=(_get_unpacking_extra_info(node, infered),)) def _check_module_attrs(self, node, module, module_names): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py index a65a1162..cf30c182 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py @@ -1,4 +1,4 @@ -# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE). # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later @@ -10,12 +10,13 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""utilities for Pylint configuration : * pylintrc * pylint.d (PYLINTHOME) """ +from __future__ import with_statement import pickle import os @@ -49,7 +50,8 @@ def load_results(base): """ data_file = get_pdata_path(base, 1) try: - return pickle.load(open(data_file)) + with open(data_file) as stream: + return pickle.load(stream) except: return {} @@ -67,7 +69,8 @@ def save_results(results, base): print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME data_file = get_pdata_path(base, 1) try: - pickle.dump(results, open(data_file, _PICK_MOD)) + with open(data_file, _PICK_MOD) as stream: + pickle.dump(results, stream) except (IOError, OSError), ex: print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex) @@ -106,12 +109,12 @@ def find_pylintrc(): ENV_HELP = ''' The following environment variables are used: * PYLINTHOME - path to the directory where data of persistent run will be stored. If not -found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working + Path to the directory where the persistent for the run will be stored. If +not found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working directory). * PYLINTRC - path to the configuration file. If not found, it will use the first -existing file among (~/.pylintrc, /etc/pylintrc). + Path to the configuration file. See the documentation for the method used +to search for configuration file. 
''' % globals() # evaluation messages ######################################################### diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py index e0754ce0..50f2c839 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py @@ -9,7 +9,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Interfaces for PyLint objects""" from logilab.common.interface import Interface diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py index 812a953b..2d781b76 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ %prog [options] module_or_package Check that a module satisfies a coding standard (and more !). @@ -82,48 +82,49 @@ def _get_python_path(filepath): module (unable to find it for instance).'), 'F0002': ('%s: %s', 'astroid-error', - 'Used when an unexpected error occurred while building the Astroid \ - representation. This is usually accompanied by a traceback. \ - Please report such errors !'), + 'Used when an unexpected error occurred while building the ' + 'Astroid representation. This is usually accompanied by a ' + 'traceback. 
Please report such errors !'), 'F0003': ('ignored builtin module %s', 'ignored-builtin-module', - 'Used to indicate that the user asked to analyze a builtin module \ - which has been skipped.'), + 'Used to indicate that the user asked to analyze a builtin ' + 'module which has been skipped.'), 'F0004': ('unexpected inferred value %s', 'unexpected-inferred-value', - 'Used to indicate that some value of an unexpected type has been \ - inferred.'), + 'Used to indicate that some value of an unexpected type has been ' + 'inferred.'), 'F0010': ('error while code parsing: %s', 'parse-error', - 'Used when an exception occured while building the Astroid \ - representation which could be handled by astroid.'), + 'Used when an exception occured while building the Astroid ' + 'representation which could be handled by astroid.'), 'I0001': ('Unable to run raw checkers on built-in module %s', 'raw-checker-failed', - 'Used to inform that a built-in module has not been checked \ - using the raw checkers.'), + 'Used to inform that a built-in module has not been checked ' + 'using the raw checkers.'), 'I0010': ('Unable to consider inline option %r', 'bad-inline-option', - 'Used when an inline option is either badly formatted or can\'t \ - be used inside modules.'), + 'Used when an inline option is either badly formatted or can\'t ' + 'be used inside modules.'), 'I0011': ('Locally disabling %s', 'locally-disabled', - 'Used when an inline option disables a message or a messages \ - category.'), + 'Used when an inline option disables a message or a messages ' + 'category.'), 'I0012': ('Locally enabling %s', 'locally-enabled', - 'Used when an inline option enables a message or a messages \ - category.'), + 'Used when an inline option enables a message or a messages ' + 'category.'), 'I0013': ('Ignoring entire file', 'file-ignored', 'Used to inform that the file will not be checked'), - 'I0014': ('Used deprecated directive "pylint:disable-all" or "pylint:disable=all"', + 'I0014': ('Used 
deprecated directive "pylint:disable-all" or ' + '"pylint:disable=all"', 'deprecated-disable-all', 'You should preferably use "pylint:skip-file" as this directive ' - 'has a less confusing name. Do this only if you are sure that all ' - 'people running Pylint on your code have version >= 0.26'), + 'has a less confusing name. Do this only if you are sure that ' + 'all people running Pylint on your code have version >= 0.26'), 'I0020': ('Suppressed %s (from line %d)', 'suppressed-message', 'A message was triggered on a line, but suppressed explicitly ' @@ -134,7 +135,11 @@ def _get_python_path(filepath): 'useless-suppression', 'Reported when a message is explicitly disabled for a line or ' 'a block of code, but never triggered.'), - + 'I0022': ('Deprecated pragma "pylint:disable-msg" or "pylint:enable-msg"', + 'deprecated-pragma', + 'You should preferably use "pylint:disable" or "pylint:enable" ' + 'instead of the deprecated suppression pragma style ' + '"pylint:disable-msg" or "pylint:enable-msg"'), 'E0001': ('%s', 'syntax-error', @@ -177,8 +182,8 @@ def make_options(): return (('ignore', {'type' : 'csv', 'metavar' : '[,...]', 'dest' : 'black_list', 'default' : ('CVS',), - 'help' : 'Add files or directories to the blacklist. \ -They should be base names, not paths.'}), + 'help' : 'Add files or directories to the blacklist. 
' + 'They should be base names, not paths.'}), ('persistent', {'default': True, 'type' : 'yn', 'metavar' : '', 'level': 1, @@ -187,8 +192,9 @@ def make_options(): ('load-plugins', {'type' : 'csv', 'metavar' : '', 'default' : (), 'level': 1, - 'help' : 'List of plugins (as comma separated values of \ -python modules names) to load, usually to register additional checkers.'}), + 'help' : 'List of plugins (as comma separated values of ' + 'python modules names) to load, usually to register ' + 'additional checkers.'}), ('output-format', {'default': 'text', 'type': 'string', 'metavar' : '', @@ -202,22 +208,23 @@ def make_options(): ('files-output', {'default': 0, 'type' : 'yn', 'metavar' : '', 'group': 'Reports', 'level': 1, - 'help' : 'Put messages in a separate file for each module / \ -package specified on the command line instead of printing them on stdout. \ -Reports (if any) will be written in a file name "pylint_global.[txt|html]".'}), + 'help' : 'Put messages in a separate file for each module / ' + 'package specified on the command line instead of printing ' + 'them on stdout. Reports (if any) will be written in a file ' + 'name "pylint_global.[txt|html]".'}), ('reports', {'default': 1, 'type' : 'yn', 'metavar' : '', 'short': 'r', 'group': 'Reports', - 'help' : 'Tells whether to display a full report or only the\ - messages'}), + 'help' : 'Tells whether to display a full report or only the ' + 'messages'}), ('evaluation', {'type' : 'string', 'metavar' : '', 'group': 'Reports', 'level': 1, - 'default': '10.0 - ((float(5 * error + warning + refactor + \ -convention) / statement) * 10)', + 'default': '10.0 - ((float(5 * error + warning + refactor + ' + 'convention) / statement) * 10)', 'help' : 'Python expression which should return a note less \ than 10 (10 is the highest note). 
You have access to the variables errors \ warning, statement which respectively contain the number of errors / warnings\ @@ -227,8 +234,8 @@ def make_options(): ('comment', {'default': 0, 'type' : 'yn', 'metavar' : '', 'group': 'Reports', 'level': 1, - 'help' : 'Add a comment according to your evaluation note. \ -This is used by the global evaluation report (RP0004).'}), + 'help' : 'Add a comment according to your evaluation note. ' + 'This is used by the global evaluation report (RP0004).'}), ('enable', {'type' : 'csv', 'metavar': '', @@ -272,7 +279,8 @@ def make_options(): ('Reports', 'Options related to output formating and reporting'), ) - def __init__(self, options=(), reporter=None, option_groups=(), pylintrc=None): + def __init__(self, options=(), reporter=None, option_groups=(), + pylintrc=None): # some stuff has to be done before ancestors initialization... # # checkers / reporter / astroid manager @@ -369,7 +377,8 @@ def set_option(self, optname, value, action=None, optdict=None): """overridden from configuration.OptionsProviderMixin to handle some special options """ - if optname in self._options_methods or optname in self._bw_options_methods: + if optname in self._options_methods or \ + optname in self._bw_options_methods: if value: try: meth = self._options_methods[optname] @@ -379,9 +388,9 @@ def set_option(self, optname, value, action=None, optdict=None): optname, optname.split('-')[0]), DeprecationWarning) value = check_csv(None, optname, value) if isinstance(value, (list, tuple)): - for _id in value : + for _id in value: meth(_id) - else : + else: meth(value) elif optname == 'output-format': self._reporter_name = value @@ -452,7 +461,8 @@ def process_tokens(self, tokens): match = OPTION_RGX.search(line) if match is None: continue - if match.group(1).strip() == "disable-all" or match.group(1).strip() == 'skip-file': + if match.group(1).strip() == "disable-all" or \ + match.group(1).strip() == 'skip-file': if match.group(1).strip() == "disable-all": 
self.add_message('I0014', line=start[0]) self.add_message('I0013', line=start[0]) @@ -470,9 +480,8 @@ def process_tokens(self, tokens): meth = self._options_methods[opt] except KeyError: meth = self._bw_options_methods[opt] - warn('%s is deprecated, replace it with %s (%s, line %s)' % ( - opt, opt.split('-')[0], self.current_file, line), - DeprecationWarning) + # found a "(dis|en)able-msg" pragma deprecated suppresssion + self.add_message('deprecated-pragma', line=start[0]) for msgid in splitstrip(value): try: if (opt, msgid) == ('disable', 'all'): @@ -563,7 +572,7 @@ def prepare_checkers(self): checker.active_msgs = messages return neededcheckers - def should_analyze_file(self, modname, path): + def should_analyze_file(self, modname, path): # pylint: disable=unused-argument """Returns whether or not a module should be checked. This implementation returns True for all inputs, indicating that all @@ -707,8 +716,8 @@ def check_astroid_module(self, astroid, walker, rawcheckers, tokencheckers): def open(self): """initialize counters""" - self.stats = { 'by_module' : {}, - 'by_msg' : {}, + self.stats = {'by_module' : {}, + 'by_msg' : {}, } for msg_cat in MSG_TYPES.itervalues(): self.stats[msg_cat] = 0 @@ -941,7 +950,7 @@ def __init__(self, args, reporter=None, exit=True): ('generate-man', {'action' : 'callback', 'callback' : self.cb_generate_manpage, 'group': 'Commands', - 'help' : "Generate pylint's man page.",'hide': True}), + 'help' : "Generate pylint's man page.", 'hide': True}), ('errors-only', {'action' : 'callback', 'callback' : self.cb_error_mode, @@ -1021,7 +1030,8 @@ def __init__(self, args, reporter=None, exit=True): if self.linter.config.profile: print >> sys.stderr, '** profiled run' import cProfile, pstats - cProfile.runctx('linter.check(%r)' % args, globals(), locals(), 'stones.prof' ) + cProfile.runctx('linter.check(%r)' % args, globals(), locals(), + 'stones.prof') data = pstats.Stats('stones.prof') data.strip_dirs() data.sort_stats('time', 'calls') 
diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py index 53064c73..e3d93efd 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py @@ -10,7 +10,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """utilities methods and classes for reporters""" import sys diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py index a51e0e7b..71d46eba 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/html.py @@ -10,7 +10,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """HTML reporter""" import sys diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py index 555efc80..bd99837b 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/text.py @@ -10,7 +10,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""Plain text reporters: :text: the default one grouping messages by module @@ -75,8 +75,8 @@ class ParseableTextReporter(TextReporter): line_format = '{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}' def __init__(self, output=None): - warnings.warn('%s output format is deprecated. This is equivalent to --msg-template=%s' - % (self.name, self.line_format)) + warnings.warn('%s output format is deprecated. This is equivalent ' + 'to --msg-template=%s' % (self.name, self.line_format)) TextReporter.__init__(self, output) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py index 05e8b41e..ff579055 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py @@ -12,7 +12,7 @@ # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """some various utilities and helper classes, most of them used in the main pylint class """ @@ -195,7 +195,7 @@ def __init__(self): # version). It contains the 1:1 mapping from symbolic names # to message definition objects. self._messages = {} - # Maps alternative names (numeric IDs, deprecated names) to + # Maps alternative names (numeric IDs, deprecated names) to # message definitions. May contain several names for each definition # object. 
self._alternative_names = {} @@ -499,7 +499,7 @@ def print_full_documentation(self): print title print '~' * len(title) for msgid, msg in sorted(msgs.iteritems(), - key=lambda (k,v): (_MSG_ORDER.index(k[0]), k)): + key=lambda (k, v): (_MSG_ORDER.index(k[0]), k)): msg = build_message_def(checker, msgid, msg) print msg.format_help(checkerref=False) print @@ -542,7 +542,7 @@ def register_report(self, reportid, r_title, r_cb, checker): checker is the checker defining the report """ reportid = reportid.upper() - self._reports.setdefault(checker, []).append( (reportid, r_title, r_cb) ) + self._reports.setdefault(checker, []).append((reportid, r_title, r_cb)) def enable_report(self, reportid): """disable the report of the given id""" @@ -612,24 +612,24 @@ def expand_modules(files_or_modules, black_list): try: filepath = file_from_modpath(modname.split('.')) if filepath is None: - errors.append( {'key' : 'F0003', 'mod': modname} ) + errors.append({'key' : 'F0003', 'mod': modname}) continue except (ImportError, SyntaxError), ex: # FIXME p3k : the SyntaxError is a Python bug and should be # removed as soon as possible http://bugs.python.org/issue10588 - errors.append( {'key': 'F0001', 'mod': modname, 'ex': ex} ) + errors.append({'key': 'F0001', 'mod': modname, 'ex': ex}) continue filepath = normpath(filepath) - result.append( {'path': filepath, 'name': modname, - 'basepath': filepath, 'basename': modname} ) + result.append({'path': filepath, 'name': modname, + 'basepath': filepath, 'basename': modname}) if not (modname.endswith('.__init__') or modname == '__init__') \ and '__init__.py' in filepath: for subfilepath in get_module_files(dirname(filepath), black_list): if filepath == subfilepath: continue submodname = '.'.join(modpath_from_file(subfilepath)) - result.append( {'path': subfilepath, 'name': submodname, - 'basepath': filepath, 'basename': modname} ) + result.append({'path': subfilepath, 'name': submodname, + 'basepath': filepath, 'basename': modname}) return result, 
errors diff --git a/pymode/libs/pylama/tasks.py b/pymode/libs/pylama/tasks.py index 4616fbf2..64b542bc 100644 --- a/pymode/libs/pylama/tasks.py +++ b/pymode/libs/pylama/tasks.py @@ -26,6 +26,7 @@ class Worker(threading.Thread): """ Get tasks from queue and run. """ def __init__(self, path_queue, result_queue): + """ Init worker. """ threading.Thread.__init__(self) self.path_queue = path_queue self.result_queue = result_queue @@ -45,7 +46,6 @@ def async_check_files(paths, options, rootpath=None): :return list: list of errors """ - errors = [] # Disable async if pylint enabled @@ -85,7 +85,6 @@ def check_path(path, options=None, rootpath=None, code=None, **meta): :return list: list of errors """ - LOGGER.info("Parse file: %s", path) rootpath = rootpath or '.' From 60cc4f506185a174b00e8a6807a116d8ce76fea5 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 24 Mar 2014 18:36:33 +0700 Subject: [PATCH 084/428] Update pylint --- pymode/libs/pylama/__init__.py | 2 +- .../pylama/lint/pylama_pyflakes/__init__.py | 9 ++--- .../pylama/lint/pylama_pylint/__init__.py | 5 +-- .../lint/pylama_pylint/astroid/as_string.py | 9 +++++ .../lint/pylama_pylint/astroid/builder.py | 6 +++ .../lint/pylama_pylint/astroid/manager.py | 27 +++++++++++++ .../pylama_pylint/astroid/node_classes.py | 3 ++ .../lint/pylama_pylint/astroid/nodes.py | 4 +- .../lint/pylama_pylint/astroid/rebuilder.py | 38 ++++++------------- .../pylama_pylint/astroid/scoped_nodes.py | 5 ++- pymode/libs/pylama/lint/pylama_pylint/main.py | 9 +++-- .../lint/pylama_pylint/pylint/__pkginfo__.py | 9 ++++- .../pylama_pylint/pylint/checkers/__init__.py | 1 - .../pylama_pylint/pylint/checkers/utils.py | 2 +- .../pylint/checkers/variables.py | 2 +- .../pylama/lint/pylama_pylint/pylint/lint.py | 35 ++++++++++------- .../pylint/reporters/__init__.py | 4 ++ 17 files changed, 107 insertions(+), 63 deletions(-) diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index d030cbc4..6a75f430 100644 --- 
a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -version_info = 3, 0, 0 +version_info = 3, 0, 2 __version__ = version = '.'.join(map(str, version_info)) __project__ = __name__ diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py index f6445453..4ecf0829 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py @@ -9,16 +9,16 @@ path = op.dirname(op.abspath(__file__)) sys.path.insert(0, path) +from pyflakes import checker + class Linter(BaseLinter): """ Pyflakes code check. """ def __init__(self): - from pyflakes import messages - - if messages.UndefinedName.message != "E0602 undefined name %r": - monkey_patch_messages(messages) + if checker.messages.UndefinedName.message != "E0602 undefined name %r": + monkey_patch_messages(checker.messages) @staticmethod def run(path, code=None, builtins="", **meta): @@ -29,7 +29,6 @@ def run(path, code=None, builtins="", **meta): """ import _ast import os - from pyflakes import checker os.environ.setdefault('PYFLAKES_BUILTINS', builtins) diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py index 92150ae4..553b1a69 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -4,19 +4,16 @@ # ================== -__version__ = '0.1.6' +__version__ = '0.2.0' __project__ = 'pylama_pylint' __author__ = "horneds " __license__ = "BSD" -import os.path import sys if sys.version_info >= (3, 0, 0): raise ImportError("pylama_pylint doesnt support python3") -CURDIR = os.path.abspath(os.path.dirname(__file__)) -sys.path.insert(0, CURDIR) from .main import Linter assert Linter diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py index 72c5b6c3..ace1c4e3 100644 --- 
a/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py @@ -467,6 +467,15 @@ def visit_starred(self, node): """return Starred node as string""" return "*" + node.value.accept(self) + def visit_yieldfrom(self, node): + """ Return an astroid.YieldFrom node as string. """ + yi_val = node.value and (" " + node.value.accept(self)) or "" + expr = 'yield from' + yi_val + if node.parent.is_statement: + return expr + else: + return "(%s)" % (expr,) + def _import_string(names): """return a list of (name, asname) formatted as a string""" diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py index fc653ec2..b088b205 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py @@ -102,6 +102,12 @@ def module_build(self, module, modname=None): # this is a built-in module # get a partial representation by introspection node = self.inspect_build(module, modname=modname, path=path) + # we have to handle transformation by ourselves since the rebuilder + # isn't called for builtin nodes + # + # XXX it's then only called for Module nodes, not for underlying + # nodes + node = self._manager.transform(node) return node def file_build(self, path, modname=None): diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py index 53b1a9c8..f5e81321 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py @@ -273,6 +273,33 @@ def register_transform(self, node_class, transform, predicate=None): """ self.transforms.setdefault(node_class, []).append( (transform, predicate) ) + def unregister_transform(self, node_class, transform, predicate=None): + """Unregister the given transform.""" + self.transforms[node_class].remove( (transform, 
predicate) ) + + def transform(self, node): + """Call matching transforms for the given node if any and return the + transformed node. + """ + try: + transforms = self.transforms[type(node)] + except KeyError: + return node # no transform registered for this class of node + orig_node = node # copy the reference + for transform_func, predicate in transforms: + if predicate is None or predicate(node): + ret = transform_func(node) + # if the transformation function returns something, it's + # expected to be a replacement for the node + if ret is not None: + if node is not orig_node: + # node has already be modified by some previous + # transformation, warn about it + warn('node %s substitued multiple times' % node) + node = ret + return node + + class Project(object): """a project handle a set of modules / packages""" diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/node_classes.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/node_classes.py index 97bd9079..01dc8d92 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/node_classes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/node_classes.py @@ -888,6 +888,9 @@ class Yield(NodeNG): _astroid_fields = ('value',) value = None +class YieldFrom(Yield): + """ Class representing a YieldFrom node. 
""" + # constants ############################################################## CONST_CLS = { diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/nodes.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/nodes.py index 9fdc67e6..263ab476 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/nodes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/nodes.py @@ -43,7 +43,7 @@ Dict, Discard, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, \ From, Getattr, Global, If, IfExp, Import, Index, Keyword, \ List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, \ - TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, \ + TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, \ const_factory from astroid.scoped_nodes import Module, GenExpr, Lambda, DictComp, \ ListComp, SetComp, Function, Class @@ -68,6 +68,6 @@ TryExcept, TryFinally, Tuple, UnaryOp, While, With, - Yield, + Yield, YieldFrom ) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py index b9a9adbb..ef8e7635 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py @@ -122,6 +122,14 @@ def _infer_metaclass(node): elif isinstance(node, Attribute): return node.attr +def _create_yield_node(node, parent, rebuilder, factory): + newnode = factory() + _lineno_parent(node, newnode, parent) + if node.value is not None: + newnode.value = rebuilder.visit(node.value, newnode) + newnode.set_line_info(newnode.last_child()) + return newnode + class TreeRebuilder(object): """Rebuilds the _ast tree to become an Astroid tree""" @@ -134,25 +142,7 @@ def __init__(self, manager): self._from_nodes = [] self._delayed_assattr = [] self._visit_meths = {} - - def _transform(self, node): - try: - transforms = self._manager.transforms[type(node)] - except KeyError: - return node # no transform registered for this 
class of node - orig_node = node # copy the reference - for transform_func, predicate in transforms: - if predicate is None or predicate(node): - ret = transform_func(node) - # if the transformation function returns something, it's - # expected to be a replacement for the node - if ret is not None: - if node is not orig_node: - # node has already be modified by some previous - # transformation, warn about it - warn('node %s substitued multiple times' % node) - node = ret - return node + self._transform = manager.transform def visit_module(self, node, modname, package): """visit a Module node by returning a fresh instance of it""" @@ -837,13 +827,7 @@ def visit_with(self, node, parent): def visit_yield(self, node, parent): """visit a Yield node by returning a fresh instance of it""" - newnode = new.Yield() - _lineno_parent(node, newnode, parent) - if node.value is not None: - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - + return _create_yield_node(node, parent, self, new.Yield) class TreeRebuilder3k(TreeRebuilder): """extend and overwrite TreeRebuilder for python3k""" @@ -954,7 +938,7 @@ def visit_child(child): return newnode def visit_yieldfrom(self, node, parent): - return self.visit_yield(node, parent) + return _create_yield_node(node, parent, self, new.YieldFrom) def visit_class(self, node, parent): newnode = super(TreeRebuilder3k, self).visit_class(node, parent) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py index a7f6ee8c..d579913a 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py @@ -32,7 +32,7 @@ from astroid.exceptions import NotFoundError, \ AstroidBuildingException, InferenceError from astroid.node_classes import Const, DelName, DelAttr, \ - Dict, From, List, Pass, Raise, Return, Tuple, Yield, \ + Dict, 
From, List, Pass, Raise, Return, Tuple, Yield, YieldFrom, \ LookupMixIn, const_factory as cf, unpack_infer from astroid.bases import NodeNG, InferenceContext, Instance,\ YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \ @@ -620,7 +620,8 @@ def is_generator(self): """return true if this is a generator function""" # XXX should be flagged, not computed try: - return self.nodes_of_class(Yield, skip_klass=(Function, Lambda)).next() + return self.nodes_of_class((Yield, YieldFrom), + skip_klass=(Function, Lambda)).next() except StopIteration: return False diff --git a/pymode/libs/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py index 131392d3..3699bbbb 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/main.py +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py @@ -1,14 +1,17 @@ """ Pylint support. """ from os import path as op, environ +import sys from pylama.lint import Linter as BaseLinter +CURDIR = op.abspath(op.dirname(__file__)) +sys.path.insert(0, CURDIR) + from astroid import MANAGER from pylint.lint import Run from pylint.reporters import BaseReporter - -PYLINT_RC = op.abspath(op.join(op.dirname(__file__), 'pylint.rc')) +PYLINT_RC = op.abspath(op.join(CURDIR, 'pylint.rc')) class Linter(BaseLinter): @@ -16,7 +19,7 @@ class Linter(BaseLinter): """ Check code with pylint. """ @staticmethod - def run(path, **meta): # noqa + def run(path, code=None, **meta): # noqa """ Pylint code checking. :return list: List of errors. diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py index d0ad387c..b48272d5 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py @@ -1,5 +1,5 @@ # pylint: disable=W0622,C0103 -# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE). 
# http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -15,13 +15,18 @@ # this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """pylint packaging information""" +import sys modname = distname = 'pylint' numversion = (1, 1, 0) version = '.'.join([str(num) for num in numversion]) -install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.0.1'] +if sys.version_info < (2, 6): + install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.0.1', + 'StringFormat'] +else: + install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.0.1'] license = 'GPL' description = "python code static checker" diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py index c68f2a8f..9346904e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py @@ -41,7 +41,6 @@ import sys import tokenize import warnings -from os.path import dirname from astroid.utils import ASTWalker from logilab.common.configuration import OptionsProviderMixIn diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py index 78deb4e7..e7d85d41 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py @@ -407,7 +407,7 @@ def get_argument_from_call(callfunc_node, position=None, keyword=None): try: if position is not None and not isinstance(callfunc_node.args[position], astroid.Keyword): return callfunc_node.args[position] - except IndexError as error: + except IndexError, error: raise NoSuchArgumentError(error) if keyword: for arg in callfunc_node.args: diff --git 
a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py index cbb14317..7c489e8b 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py @@ -233,7 +233,7 @@ def leave_module(self, node): self.add_message('undefined-all-variable', args=elt_name, node=elt) - except SyntaxError as exc: + except SyntaxError, exc: # don't yield an syntax-error warning, # because it will be later yielded # when the file will be checked diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py index 2d781b76..7d88d8e4 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py @@ -1,4 +1,4 @@ -# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -873,15 +873,20 @@ def preprocess_options(args, search_for): option, val = arg[2:], None try: cb, takearg = search_for[option] + except KeyError: + i += 1 + else: del args[i] if takearg and val is None: if i >= len(args) or args[i].startswith('-'): - raise ArgumentPreprocessingError(arg) + msg = 'Option %s expects a value' % option + raise ArgumentPreprocessingError(msg) val = args[i] del args[i] + elif not takearg and val is not None: + msg = "Option %s doesn't expects a value" % option + raise ArgumentPreprocessingError(msg) cb(option, val) - except KeyError: - i += 1 else: i += 1 @@ -901,12 +906,13 @@ def __init__(self, args, reporter=None, exit=True): self._plugins = [] try: preprocess_options(args, { - # option: (callback, takearg) - 'rcfile': (self.cb_set_rcfile, True), - 'load-plugins': (self.cb_add_plugins, True), - }) + # option: (callback, 
takearg) + 'init-hooks': (cb_init_hook, True), + 'rcfile': (self.cb_set_rcfile, True), + 'load-plugins': (self.cb_add_plugins, True), + }) except ArgumentPreprocessingError, ex: - print >> sys.stderr, 'Argument %s expects a value.' % (ex.args[0],) + print >> sys.stderr, ex sys.exit(32) self.linter = linter = self.LinterClass(( @@ -916,8 +922,9 @@ def __init__(self, args, reporter=None, exit=True): 'help' : 'Specify a configuration file.'}), ('init-hook', - {'action' : 'callback', 'type' : 'string', 'metavar': '', - 'callback' : cb_init_hook, 'level': 1, + {'action' : 'callback', 'callback' : lambda *args: 1, + 'type' : 'string', 'metavar': '', + 'level': 1, 'help' : 'Python code to execute, usually for sys.path \ manipulation such as pygtk.require().'}), @@ -1043,11 +1050,11 @@ def __init__(self, args, reporter=None, exit=True): sys.exit(self.linter.msg_status) def cb_set_rcfile(self, name, value): - """callback for option preprocessing (i.e. before optik parsing)""" + """callback for option preprocessing (i.e. before option parsing)""" self._rcfile = value def cb_add_plugins(self, name, value): - """callback for option preprocessing (i.e. before optik parsing)""" + """callback for option preprocessing (i.e. 
before option parsing)""" self._plugins.extend(splitstrip(value)) def cb_error_mode(self, *args, **kwargs): @@ -1086,7 +1093,7 @@ def cb_list_messages(self, option, optname, value, parser): # FIXME self.linter.list_messages() sys.exit(0) -def cb_init_hook(option, optname, value, parser): +def cb_init_hook(optname, value): """exec arbitrary code to set sys.path for instance""" exec value diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py index e3d93efd..a767a052 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/reporters/__init__.py @@ -28,6 +28,10 @@ def cmp(a, b): return (a > b) - (a < b) +if sys.version_info < (2, 6): + import stringformat + stringformat.init(True) + def diff_string(old, new): """given a old and new int value, return a string representing the difference From 603323466829451918d27d10e05e3b3c14063418 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 26 Mar 2014 13:27:29 +0700 Subject: [PATCH 085/428] Add curdir to sys.path when code running --- pymode/run.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pymode/run.py b/pymode/run.py index f7ddddaf..5860f7d9 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -20,7 +20,6 @@ def run_code(): :returns: None """ - errors, err = [], '' line1, line2 = env.var('a:line1'), env.var('a:line2') lines = __prepare_lines(line1, line2) @@ -36,7 +35,9 @@ def run_code(): try: code = compile('\n'.join(lines) + '\n', env.curbuf.name, 'exec') + sys.path.insert(0, env.curdir) exec(code, context) # noqa + sys.path.pop(0) except SystemExit as e: if e.code: From 48eb0edfd079eb0b282afdfb027f82cb2d59f0e7 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 26 Mar 2014 18:28:06 +0700 Subject: [PATCH 086/428] update pylama --- pymode/libs/pylama/__init__.py | 6 +- pymode/libs/pylama/config.py | 55 +++-- 
pymode/libs/pylama/core.py | 7 +- .../libs/pylama/lint/pylama_pep8/__init__.py | 14 +- pymode/libs/pylama/lint/pylama_pep8/pep8.py | 189 +++++++++++++----- pymode/libs/pylama/tasks.py | 2 +- pymode/lint.py | 1 + 7 files changed, 183 insertions(+), 91 deletions(-) diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index 6a75f430..96d3a487 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,9 +5,7 @@ """ -version_info = 3, 0, 2 - -__version__ = version = '.'.join(map(str, version_info)) -__project__ = __name__ +__version__ = "3.1.1" +__project__ = "pylama" __author__ = "Kirill Klenov " __license__ = "GNU LGPL" diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index 04ac4bb6..5ba7e5e7 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -7,7 +7,7 @@ import logging from argparse import ArgumentParser -from . import version +from . import __version__ from .libs.inirama import Namespace from .lint.extensions import LINTERS @@ -72,7 +72,7 @@ def parse_linters(linters): "--verbose", "-v", action='store_true', help="Verbose mode.") PARSER.add_argument('--version', action='version', - version='%(prog)s ' + version) + version='%(prog)s ' + __version__) PARSER.add_argument( "--format", "-f", default=_Default('pep8'), choices=['pep8', 'pylint'], @@ -116,7 +116,7 @@ def parse_linters(linters): ACTIONS = dict((a.dest, a) for a in PARSER._actions) -def parse_options(args=None, **overrides): # noqa +def parse_options(args=None, config=True, **overrides): # noqa """ Parse options from command line and configuration files. 
:return argparse.Namespace: @@ -127,16 +127,8 @@ def parse_options(args=None, **overrides): # noqa # Parse args from command string options = PARSER.parse_args(args) - - # Parse options from ini file - cfg = get_config(str(options.options)) - - # Compile options from ini - for k, v in cfg.default.items(): - LOGGER.info('Find option %s (%s)', k, v) - passed_value = getattr(options, k, _Default()) - if isinstance(passed_value, _Default): - setattr(options, k, _Default(v)) + options.file_params = dict() + options.linter_params = dict() # Override options for k, v in overrides.items(): @@ -144,27 +136,34 @@ def parse_options(args=None, **overrides): # noqa if isinstance(passed_value, _Default): setattr(options, k, _Default(v)) + # Compile options from ini + if config: + cfg = get_config(str(options.options)) + for k, v in cfg.default.items(): + LOGGER.info('Find option %s (%s)', k, v) + passed_value = getattr(options, k, _Default()) + if isinstance(passed_value, _Default): + setattr(options, k, _Default(v)) + + # Parse file related options + for k, s in cfg.sections.items(): + if k == cfg.default_section: + continue + if k in LINTERS: + options.linter_params[k] = dict(s) + continue + mask = re(fnmatch.translate(k)) + options.file_params[mask] = dict(s) + options.file_params[mask]['lint'] = int( + options.file_params[mask].get('lint', 1) + ) + # Postprocess options opts = dict(options.__dict__.items()) for name, value in opts.items(): if isinstance(value, _Default): setattr(options, name, process_value(name, value.value)) - # Parse file related options - options.file_params = dict() - options.linter_params = dict() - for k, s in cfg.sections.items(): - if k == cfg.default_section: - continue - if k in LINTERS: - options.linter_params[k] = dict(s) - continue - mask = re(fnmatch.translate(k)) - options.file_params[mask] = dict(s) - options.file_params[mask]['lint'] = int( - options.file_params[mask].get('lint', 1) - ) - return options diff --git 
a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 222c5f77..88bbc346 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -47,11 +47,11 @@ def run(path, code=None, options=None): item = (item, LINTERS.get(item)) name, linter = item - LOGGER.debug("Run %s", name) if not linter or not linter.allow(path): continue + LOGGER.info("Run %s", name) meta = options.linter_params.get(name, dict()) result = linter.run(path, code=code, **meta) for e in result: @@ -77,9 +77,8 @@ def run(path, code=None, options=None): )) except Exception as e: - LOGGER.debug("Unknown exception %s", e) import traceback - logging.debug(traceback.format_exc()) + LOGGER.info(traceback.format_exc()) errors = [er for er in errors if filter_errors(er, **params)] @@ -180,7 +179,7 @@ def __enter__(self): def __exit__(self, t, value, traceback): """ Close opened file. """ - if not self._file is None: + if self._file is not None: self._file.close() if t and LOGGER.level == logging.DEBUG: diff --git a/pymode/libs/pylama/lint/pylama_pep8/__init__.py b/pymode/libs/pylama/lint/pylama_pep8/__init__.py index 830b080e..4e83a425 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pep8/__init__.py @@ -8,7 +8,6 @@ from io import StringIO - class Linter(BaseLinter): """ PEP8 code check. """ @@ -42,12 +41,13 @@ def error(self, line_number, offset, text, check): code = super(_PEP8Report, self).error( line_number, offset, text, check) - self.errors.append(dict( - text=text, - type=code.replace('E', 'C'), - col=offset + 1, - lnum=line_number, - )) + if code: + self.errors.append(dict( + text=text, + type=code.replace('E', 'C'), + col=offset + 1, + lnum=line_number, + )) def get_file_results(self): """ Get errors. 
diff --git a/pymode/libs/pylama/lint/pylama_pep8/pep8.py b/pymode/libs/pylama/lint/pylama_pep8/pep8.py index 215bde2d..7625736d 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/pep8.py +++ b/pymode/libs/pylama/lint/pylama_pep8/pep8.py @@ -45,7 +45,7 @@ 700 statements 900 syntax error """ -__version__ = '1.4.7a0' +__version__ = '1.5.0' import os import sys @@ -93,12 +93,13 @@ INDENT_REGEX = re.compile(r'([ \t]*)') RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,') -RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,\s*\w+\s*,\s*\w+') +RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,.*,\s*\w+\s*$') ERRORCODE_REGEX = re.compile(r'\b[A-Z]\d{3}\b') DOCSTRING_REGEX = re.compile(r'u?r?["\']') EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]') WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)') COMPARE_SINGLETON_REGEX = re.compile(r'([=!]=)\s*(None|False|True)') +COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^[({ ]+\s+(in|is)\s') COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type' r'|\s*\(\s*([^)]*[^ )])\s*\))') KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS)) @@ -200,7 +201,7 @@ def missing_newline(physical_line): return len(physical_line), "W292 no newline at end of file" -def maximum_line_length(physical_line, max_line_length): +def maximum_line_length(physical_line, max_line_length, multiline): """ Limit all lines to a maximum of 79 characters. @@ -216,6 +217,13 @@ def maximum_line_length(physical_line, max_line_length): line = physical_line.rstrip() length = len(line) if length > max_line_length and not noqa(line): + # Special case for long URLs in multi-line docstrings or comments, + # but still report the error when the 72 first chars are whitespaces. 
+ chunks = line.split() + if ((len(chunks) == 1 and multiline) or + (len(chunks) == 2 and chunks[0] == '#')) and \ + len(line) - len(chunks[-1]) < max_line_length - 7: + return if hasattr(line, 'decode'): # Python 2 # The line could contain multi-byte characters try: @@ -382,7 +390,7 @@ def indentation(logical_line, previous_logical, indent_char, def continued_indentation(logical_line, tokens, indent_level, hang_closing, - noqa, verbose): + indent_char, noqa, verbose): r""" Continuation lines should align wrapped elements either vertically using Python's implicit line joining inside parentheses, brackets and braces, or @@ -403,10 +411,12 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, E122: a = (\n42) E123: a = (\n 42\n ) E124: a = (24,\n 42\n) - E125: if (a or\n b):\n pass + E125: if (\n b):\n pass E126: a = (\n 42) E127: a = (24,\n 42) E128: a = (24,\n 42) + E129: if (a or\n b):\n pass + E131: a = (\n 42\n 24) """ first_row = tokens[0][2][0] nrows = 1 + tokens[-1][2][0] - first_row @@ -420,12 +430,15 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, indent_next = logical_line.endswith(':') row = depth = 0 + valid_hangs = (4,) if indent_char != '\t' else (4, 8) # remember how many brackets were opened on each line parens = [0] * nrows # relative indents of physical lines rel_indent = [0] * nrows # for each depth, collect a list of opening rows open_rows = [[0]] + # for each depth, memorize the hanging indentation + hangs = [None] # visual indents indent_chances = {} last_indent = tokens[0][2] @@ -455,11 +468,13 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, close_bracket = (token_type == tokenize.OP and text in ']})') # is the indent relative to an opening bracket line? 
- valid_hang = 4 if (hang_closing or not close_bracket) else 0 for open_row in reversed(open_rows[depth]): - if rel_indent[row] == rel_indent[open_row] + valid_hang: + hang = rel_indent[row] - rel_indent[open_row] + hanging_indent = hang in valid_hangs + if hanging_indent: break - hang = rel_indent[row] - rel_indent[open_row] + if hangs[depth]: + hanging_indent = (hang == hangs[depth]) # is there any chance of visual indent? visual_indent = (not close_bracket and hang > 0 and indent_chances.get(start[1])) @@ -478,15 +493,15 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, # visual indent is broken yield (start, "E128 continuation line " "under-indented for visual indent") - elif hang == 4 or (indent_next and rel_indent[row] == 8): + elif hanging_indent or (indent_next and rel_indent[row] == 8): # hanging indent is verified if close_bracket and not hang_closing: yield (start, "E123 closing bracket does not match " "indentation of opening bracket's line") + hangs[depth] = hang elif visual_indent is True: # visual indent is verified - if not indent[depth]: - indent[depth] = start[1] + indent[depth] = start[1] elif visual_indent in (text, str): # ignore token lined up with matching one from a previous line pass @@ -496,10 +511,14 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, error = "E122", "missing indentation or outdented" elif indent[depth]: error = "E127", "over-indented for visual indent" - elif hang % 4: - error = "E121", "indentation is not a multiple of four" + elif not close_bracket and hangs[depth]: + error = "E131", "unaligned for hanging indent" else: - error = "E126", "over-indented for hanging indent" + hangs[depth] = hang + if hang > 4: + error = "E126", "over-indented for hanging indent" + else: + error = "E121", "under-indented for hanging indent" yield start, "%s continuation line %s" % error # look for visual indenting @@ -516,12 +535,15 @@ def continued_indentation(logical_line, tokens, 
indent_level, hang_closing, # special case for the "if" statement because len("if (") == 4 elif not indent_chances and not row and not depth and text == 'if': indent_chances[end[1] + 1] = True + elif text == ':' and line[end[1]:].isspace(): + open_rows[depth].append(row) # keep track of bracket depth if token_type == tokenize.OP: if text in '([{': depth += 1 indent.append(0) + hangs.append(None) if len(open_rows) == depth: open_rows.append([]) open_rows[depth].append(row) @@ -532,6 +554,7 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, elif text in ')]}' and depth > 0: # parent indents should not be more than this one prev_indent = indent.pop() or last_indent[1] + hangs.pop() for d in range(depth): if indent[d] > prev_indent: indent[d] = 0 @@ -554,8 +577,11 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, last_token_multiline = (start[0] != end[0]) if indent_next and expand_indent(line) == indent_level + 4: - yield (last_indent, "E125 continuation line does not distinguish " - "itself from next logical line") + if visual_indent: + code = "E129 visually indented line" + else: + code = "E125 continuation line" + yield (last_indent, "%s with same indent as next logical line" % code) def whitespace_before_parameters(logical_line, tokens): @@ -774,7 +800,7 @@ def whitespace_around_named_parameter_equals(logical_line, tokens): prev_end = end -def whitespace_before_inline_comment(logical_line, tokens): +def whitespace_before_comment(logical_line, tokens): """ Separate inline comments by at least two spaces. @@ -782,23 +808,33 @@ def whitespace_before_inline_comment(logical_line, tokens): comments should be separated by at least two spaces from the statement. They should start with a # and a single space. + Each line of a block comment starts with a # and a single space + (unless it is indented text inside the comment). 
+ Okay: x = x + 1 # Increment x Okay: x = x + 1 # Increment x + Okay: # Block comment E261: x = x + 1 # Increment x E262: x = x + 1 #Increment x E262: x = x + 1 # Increment x + E265: #Block comment """ prev_end = (0, 0) for token_type, text, start, end, line in tokens: if token_type == tokenize.COMMENT: - if not line[:start[1]].strip(): - continue - if prev_end[0] == start[0] and start[1] < prev_end[1] + 2: - yield (prev_end, - "E261 at least two spaces before inline comment") + inline_comment = line[:start[1]].strip() + if inline_comment: + if prev_end[0] == start[0] and start[1] < prev_end[1] + 2: + yield (prev_end, + "E261 at least two spaces before inline comment") symbol, sp, comment = text.partition(' ') - if symbol not in ('#', '#:') or comment[:1].isspace(): - yield start, "E262 inline comment should start with '# '" + bad_prefix = symbol not in ('#', '#:') + if inline_comment: + if bad_prefix or comment[:1].isspace(): + yield start, "E262 inline comment should start with '# '" + elif bad_prefix: + if text.rstrip('#') and (start[0] > 1 or symbol[1] != '!'): + yield start, "E265 block comment should start with '# '" elif token_type != tokenize.NL: prev_end = end @@ -934,6 +970,29 @@ def comparison_to_singleton(logical_line, noqa): (code, singleton, msg)) +def comparison_negative(logical_line): + r""" + Negative comparison, either identity or membership, should be + done using "not in" and "is not". 
+ + Okay: if x not in y:\n pass + Okay: assert (X in Y or X is Z) + Okay: if not (X in Y):\n pass + Okay: zz = x is not y + E713: Z = not X in Y + E713: if not X.B in Y:\n pass + E714: if not X is Y:\n pass + E714: Z = not X.B is Y + """ + match = COMPARE_NEGATIVE_REGEX.search(logical_line) + if match: + pos = match.start(1) + if match.group(2) == 'in': + yield pos, "E713 test for membership should be 'not in'" + else: + yield pos, "E714 test for object identity should be 'is not'" + + def comparison_type(logical_line): """ Object type comparisons should always use isinstance() instead of @@ -957,7 +1016,7 @@ def comparison_type(logical_line): yield match.start(), "E721 do not compare types, use 'isinstance()'" -def python_3000_has_key(logical_line): +def python_3000_has_key(logical_line, noqa): r""" The {}.has_key() method is removed in the Python 3. Use the 'in' operation instead. @@ -966,7 +1025,7 @@ def python_3000_has_key(logical_line): W601: assert d.has_key('alph') """ pos = logical_line.find('.has_key(') - if pos > -1: + if pos > -1 and not noqa: yield pos, "W601 .has_key() is deprecated, use 'in'" @@ -1137,7 +1196,7 @@ def normalize_paths(value, parent=os.curdir): return value paths = [] for path in value.split(','): - if path.startswith('./'): + if '/' in path: path = os.path.abspath(os.path.join(parent, path)) paths.append(path.rstrip('/')) return paths @@ -1208,6 +1267,7 @@ def __init__(self, filename=None, lines=None, self._logical_checks = options.logical_checks self._ast_checks = options.ast_checks self.max_line_length = options.max_line_length + self.multiline = False # in a multiline string? self.hang_closing = options.hang_closing self.verbose = options.verbose self.filename = filename @@ -1256,16 +1316,9 @@ def readline(self): self.line_number += 1 if self.line_number > len(self.lines): return '' - return self.lines[self.line_number - 1] - - def readline_check_physical(self): - """ - Check and return the next physical line. 
This method can be - used to feed tokenize.generate_tokens. - """ - line = self.readline() - if line: - self.check_physical(line) + line = self.lines[self.line_number - 1] + if self.indent_char is None and line[:1] in WHITESPACE: + self.indent_char = line[0] return line def run_check(self, check, argument_names): @@ -1282,13 +1335,13 @@ def check_physical(self, line): Run all physical checks on a raw input line. """ self.physical_line = line - if self.indent_char is None and line[:1] in WHITESPACE: - self.indent_char = line[0] for name, check, argument_names in self._physical_checks: result = self.run_check(check, argument_names) if result is not None: (offset, text) = result self.report_error(self.line_number, offset, text, check) + if text[:4] == 'E101': + self.indent_char = line[0] def build_tokens_line(self): """ @@ -1336,9 +1389,9 @@ def check_logical(self): """ self.build_tokens_line() self.report.increment_logical_line() - first_line = self.lines[self.mapping[0][1][2][0] - 1] - indent = first_line[:self.mapping[0][1][2][1]] - self.previous_indent_level = self.indent_level + token0 = self.mapping[0][1] if self.mapping else self.tokens[0] + first_line = self.lines[token0[2][0] - 1] + indent = first_line[:token0[2][1]] self.indent_level = expand_indent(indent) if self.verbose >= 2: print(self.logical_line[:80].rstrip()) @@ -1350,12 +1403,17 @@ def check_logical(self): if isinstance(offset, tuple): (orig_number, orig_offset) = offset else: + orig_number = token0[2][0] + orig_offset = token0[2][1] + offset for token_offset, token in self.mapping: if offset >= token_offset: orig_number = token[2][0] orig_offset = (token[2][1] + offset - token_offset) self.report_error(orig_number, orig_offset, text, check) - self.previous_logical = self.logical_line + if self.logical_line: + self.previous_indent_level = self.indent_level + self.previous_logical = self.logical_line + self.tokens = [] def check_ast(self): try: @@ -1371,13 +1429,46 @@ def check_ast(self): def 
generate_tokens(self): if self._io_error: self.report_error(1, 0, 'E902 %s' % self._io_error, readlines) - tokengen = tokenize.generate_tokens(self.readline_check_physical) + tokengen = tokenize.generate_tokens(self.readline) try: for token in tokengen: + self.maybe_check_physical(token) yield token except (SyntaxError, tokenize.TokenError): self.report_invalid_syntax() + def maybe_check_physical(self, token): + """ + If appropriate (based on token), check current physical line(s). + """ + # Called after every token, but act only on end of line. + if token[0] in (tokenize.NEWLINE, tokenize.NL): + # Obviously, a newline token ends a single physical line. + self.check_physical(token[4]) + elif token[0] == tokenize.STRING and '\n' in token[1]: + # Less obviously, a string that contains newlines is a + # multiline string, either triple-quoted or with internal + # newlines backslash-escaped. Check every physical line in the + # string *except* for the last one: its newline is outside of + # the multiline string, so we consider it a regular physical + # line, and will check it like any other physical line. + # + # Subtleties: + # - we don't *completely* ignore the last line; if it contains + # the magical "# noqa" comment, we disable all physical + # checks for the entire multiline string + # - have to wind self.line_number back because initially it + # points to the last line of the string, and we want + # check_physical() to give accurate feedback + if noqa(token[4]): + return + self.multiline = True + self.line_number = token[2][0] + for line in token[1].split('\n')[:-1]: + self.check_physical(line + '\n') + self.line_number += 1 + self.multiline = False + def check_all(self, expected=None, line_offset=0): """ Run all checks on the input file. 
@@ -1388,6 +1479,7 @@ def check_all(self, expected=None, line_offset=0): self.line_number = 0 self.indent_char = None self.indent_level = 0 + self.previous_indent_level = 0 self.previous_logical = '' self.tokens = [] self.blank_lines = blank_lines_before_comment = 0 @@ -1412,20 +1504,23 @@ def check_all(self, expected=None, line_offset=0): if self.blank_lines < blank_lines_before_comment: self.blank_lines = blank_lines_before_comment self.check_logical() - self.tokens = [] self.blank_lines = blank_lines_before_comment = 0 elif token_type == tokenize.NL: if len(self.tokens) == 1: # The physical line contains only this token. self.blank_lines += 1 - self.tokens = [] + del self.tokens[0] + else: + self.check_logical() elif token_type == tokenize.COMMENT and len(self.tokens) == 1: if blank_lines_before_comment < self.blank_lines: blank_lines_before_comment = self.blank_lines self.blank_lines = 0 if COMMENT_WITH_NL: # The comment also ends a physical line - self.tokens = [] + text = text.rstrip('\r\n') + self.tokens = [(token_type, text) + token[2:]] + self.check_logical() return self.report.get_file_results() diff --git a/pymode/libs/pylama/tasks.py b/pymode/libs/pylama/tasks.py index 64b542bc..a7172b31 100644 --- a/pymode/libs/pylama/tasks.py +++ b/pymode/libs/pylama/tasks.py @@ -49,7 +49,7 @@ def async_check_files(paths, options, rootpath=None): errors = [] # Disable async if pylint enabled - async = options.async and not 'pylint' in options.linters + async = options.async and 'pylint' not in options.linters if not async: for path in paths: diff --git a/pymode/lint.py b/pymode/lint.py index 854458dc..6c89852e 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -26,6 +26,7 @@ def code_check(): select=env.var('g:pymode_lint_select'), linters=env.var('g:pymode_lint_checkers'), ) + env.debug(options) path = os.path.relpath(env.curbuf.name, env.curdir) env.debug("Start code check: ", path) From b79b2e991e48488fb2800d9deec3b2ec457bb095 Mon Sep 17 00:00:00 2001 From: Kirill 
Klenov Date: Thu, 27 Mar 2014 01:41:15 +0700 Subject: [PATCH 087/428] Add __file__ in run context --- pymode/run.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pymode/run.py b/pymode/run.py index 5860f7d9..3a8f24c2 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -28,7 +28,10 @@ def run_code(): lines.pop(ix) context = dict( - __name__='__main__', input=env.user_input, raw_input=env.user_input) + __name__='__main__', + __file__=env.var('expand("%:p")'), + input=env.user_input, + raw_input=env.user_input) sys.stdout, stdout_ = StringIO(), sys.stdout sys.stderr, stderr_ = StringIO(), sys.stderr From 781da2b1cbdf353db24c46e8cdf4b465a3394637 Mon Sep 17 00:00:00 2001 From: Copper Phosphate Date: Tue, 1 Apr 2014 17:20:38 +0200 Subject: [PATCH 088/428] Fixes StringIO import error in #pymode#doc#Show. Fixes #409. --- pymode/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pymode/__init__.py b/pymode/__init__.py index 67578eb9..857bac60 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -31,9 +31,12 @@ class Options(object): def get_documentation(): """ Search documentation and append to current buffer. 
""" - import StringIO + try: + from StringIO import StringIO + except ImportError: + from io import StringIO - sys.stdout, _ = StringIO.StringIO(), sys.stdout + sys.stdout, _ = StringIO(), sys.stdout help(vim.eval('a:word')) sys.stdout, out = _, sys.stdout.getvalue() vim.current.buffer.append(str(out).splitlines(), 0) From 061bf839ded519a02fef57adfa7adc1d19b08ae5 Mon Sep 17 00:00:00 2001 From: Anton Bakhtin Date: Sat, 3 May 2014 02:42:29 +0400 Subject: [PATCH 089/428] Fix autoimport trigger --- pymode/rope.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/rope.py b/pymode/rope.py index 7d50c7d4..b0833ddb 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -362,7 +362,7 @@ def __init__(self, path, project_path): if os.path.exists("%s/__init__.py" % project_path): sys.path.append(project_path) - if self.options.get('autoimport') == '1': + if self.options.get('autoimport'): self.generate_autoimport_cache() env.debug('Context init', project_path) From 2286cc706280246bb22a8d0fde9eb2abe9fb7a71 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 7 May 2014 12:50:22 +0700 Subject: [PATCH 090/428] Fix folding. --- autoload/pymode/folding.vim | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 9bfa78d2..78cf8486 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -30,8 +30,8 @@ fun! pymode#folding#text() " {{{ let line = strpart(line, 0, windowwidth - 2 -len(foldedlinecount)) let line = substitute(line, '\%("""\|''''''\)', '', '') - let fillcharcount = windowwidth - len(line) - len(foldedlinecount) - return line . '…' . repeat(s:symbol, fillcharcount) . ' ' . foldedlinecount . ' ' + let fillcharcount = windowwidth - len(line) - len(foldedlinecount) + 1 + return line . ' ' . repeat(s:symbol, fillcharcount) . ' ' . 
foldedlinecount endfunction "}}} From 2f15f3d31b942a20874d2ae5a8cfad7a4088c3eb Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 7 May 2014 12:50:46 +0700 Subject: [PATCH 091/428] Update pylint, pep257, pyflakes. --- pylama.ini | 2 +- pymode/libs/pylama/__init__.py | 2 +- pymode/libs/pylama/libs/inirama.py | 16 +- .../libs/pylama/lint/pylama_pep257/pep257.py | 2 + pymode/libs/pylama/lint/pylama_pep8/pep8.py | 447 ++++++++--------- .../lint/pylama_pyflakes/pyflakes/__init__.py | 2 +- .../lint/pylama_pyflakes/pyflakes/checker.py | 332 +++++++------ .../pylama/lint/pylama_pylint/__init__.py | 6 +- .../lint/pylama_pylint/astroid/__pkginfo__.py | 2 +- .../lint/pylama_pylint/astroid/bases.py | 8 +- .../lint/pylama_pylint/astroid/brain/py2gi.py | 16 +- .../pylama_pylint/astroid/brain/py2stdlib.py | 29 +- .../lint/pylama_pylint/astroid/builder.py | 18 +- .../lint/pylama_pylint/astroid/inference.py | 5 + .../lint/pylama_pylint/astroid/manager.py | 17 +- .../pylama_pylint/astroid/raw_building.py | 3 +- .../pylama_pylint/astroid/scoped_nodes.py | 93 +++- pymode/libs/pylama/lint/pylama_pylint/main.py | 2 +- .../lint/pylama_pylint/pylint/__pkginfo__.py | 12 +- .../pylama_pylint/pylint/checkers/__init__.py | 2 - .../pylama_pylint/pylint/checkers/base.py | 178 ++++--- .../pylama_pylint/pylint/checkers/classes.py | 151 ++++-- .../pylint/checkers/design_analysis.py | 37 +- .../pylint/checkers/exceptions.py | 57 ++- .../pylama_pylint/pylint/checkers/format.py | 452 +++++++++++++++--- .../pylama_pylint/pylint/checkers/imports.py | 45 +- .../pylama_pylint/pylint/checkers/logging.py | 127 +++-- .../pylama_pylint/pylint/checkers/misc.py | 4 +- .../pylama_pylint/pylint/checkers/newstyle.py | 47 +- .../pylama_pylint/pylint/checkers/stdlib.py | 4 +- .../pylama_pylint/pylint/checkers/strings.py | 35 +- .../pylint/checkers/typecheck.py | 126 +++-- .../pylint/checkers/variables.py | 134 ++++-- .../lint/pylama_pylint/pylint/config.py | 2 + .../pylama/lint/pylama_pylint/pylint/lint.py | 
106 ++-- .../pylama/lint/pylama_pylint/pylint/utils.py | 50 +- 36 files changed, 1630 insertions(+), 941 deletions(-) diff --git a/pylama.ini b/pylama.ini index d1651077..07c1ab7a 100644 --- a/pylama.ini +++ b/pylama.ini @@ -1,3 +1,3 @@ [main] -ignore = R0201,R0922,C0111,E1103 +ignore = R0201,R0922,E1103 skip = pymode/autopep8.py diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index 96d3a487..2ba54bd4 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -__version__ = "3.1.1" +__version__ = "3.2.0" __project__ = "pylama" __author__ = "Kirill Klenov " __license__ = "GNU LGPL" diff --git a/pymode/libs/pylama/libs/inirama.py b/pymode/libs/pylama/libs/inirama.py index 1ba1a1eb..095e8dc0 100644 --- a/pymode/libs/pylama/libs/inirama.py +++ b/pymode/libs/pylama/libs/inirama.py @@ -46,12 +46,12 @@ def __setitem__(self, key, value): def __delitem__(self, key): dict.__delitem__(self, key) - ix = self.__map.pop(key) + self.__map.pop(key) self.__order = self.null def __iter__(self): for key in self.__order: - if not key is self.null: + if key is not self.null: yield key def keys(self): @@ -67,8 +67,8 @@ def keys(self): iteritems = DictMixin.iteritems -__version__ = '0.5.0' -__project__ = 'Inirama' +__version__ = "0.5.1" +__project__ = "Inirama" __author__ = "Kirill Klenov " __license__ = "BSD" @@ -193,7 +193,11 @@ def __init__(self, namespace, *args, **kwargs): self.__storage__ = dict() def __setitem__(self, name, value): - self.__storage__[name] = str(value) + value = str(value) + if value.isdigit(): + value = int(value) + + self.__storage__[name] = value def __getitem__(self, name): return self.__storage__[name] @@ -370,7 +374,7 @@ def __getitem__(self, name): :return :class:`inirama.Section`: section """ - if not name in self.sections: + if name not in self.sections: self.sections[name] = self.section_type(self) return self.sections[name] diff --git a/pymode/libs/pylama/lint/pylama_pep257/pep257.py 
b/pymode/libs/pylama/lint/pylama_pep257/pep257.py index 69a8fea2..c5df0f72 100644 --- a/pymode/libs/pylama/lint/pylama_pep257/pep257.py +++ b/pymode/libs/pylama/lint/pylama_pep257/pep257.py @@ -235,6 +235,8 @@ def parse_all(self): sys.stderr.write(msg) self.consume(tk.OP) s = '(' + while self.current.kind in (tk.NL, tk.COMMENT): + self.stream.move() if self.current.kind != tk.STRING: raise AllError('Could not evaluate contents of __all__. ') while self.current.value not in ')]': diff --git a/pymode/libs/pylama/lint/pylama_pep8/pep8.py b/pymode/libs/pylama/lint/pylama_pep8/pep8.py index 7625736d..bc911374 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/pep8.py +++ b/pymode/libs/pylama/lint/pylama_pep8/pep8.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # pep8.py - Check Python source code formatting, according to PEP 8 # Copyright (C) 2006-2009 Johann C. Rocholl -# Copyright (C) 2009-2013 Florent Xicluna +# Copyright (C) 2009-2014 Florent Xicluna # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation files @@ -24,8 +24,7 @@ # SOFTWARE. r""" -Check Python source code formatting, according to PEP 8: -http://www.python.org/dev/peps/pep-0008/ +Check Python source code formatting, according to PEP 8. 
For usage and a list of options, try this: $ python pep8.py -h @@ -45,7 +44,9 @@ 700 statements 900 syntax error """ -__version__ = '1.5.0' +from __future__ import with_statement + +__version__ = '1.5.7a0' import os import sys @@ -87,8 +88,10 @@ '**=', '*=', '/=', '//=', '+=', '-=', '!=', '<>', '<', '>', '%=', '^=', '&=', '|=', '==', '<=', '>=', '<<=', '>>=', '=']) WHITESPACE = frozenset(' \t') -SKIP_TOKENS = frozenset([tokenize.COMMENT, tokenize.NL, tokenize.NEWLINE, - tokenize.INDENT, tokenize.DEDENT]) +NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE]) +SKIP_TOKENS = NEWLINE.union([tokenize.INDENT, tokenize.DEDENT]) +# ERRORTOKEN is triggered by backticks in Python 3 +SKIP_COMMENTS = SKIP_TOKENS.union([tokenize.COMMENT, tokenize.ERRORTOKEN]) BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines'] INDENT_REGEX = re.compile(r'([ \t]*)') @@ -118,8 +121,7 @@ def tabs_or_spaces(physical_line, indent_char): - r""" - Never mix tabs and spaces. + r"""Never mix tabs and spaces. The most popular way of indenting Python is with spaces only. The second-most popular way is with tabs only. Code indented with a mixture @@ -138,9 +140,7 @@ def tabs_or_spaces(physical_line, indent_char): def tabs_obsolete(physical_line): - r""" - For new projects, spaces-only are strongly recommended over tabs. Most - editors have features that make this easy to do. + r"""For new projects, spaces-only are strongly recommended over tabs. Okay: if True:\n return W191: if True:\n\treturn @@ -151,16 +151,7 @@ def tabs_obsolete(physical_line): def trailing_whitespace(physical_line): - r""" - JCR: Trailing whitespace is superfluous. - FBM: Except when it occurs as part of a blank line (i.e. the line is - nothing but whitespace). According to Python docs[1] a line with only - whitespace is considered a blank line, and is to be ignored. However, - matching a blank line to its indentation level avoids mistakenly - terminating a multi-line statement (e.g. 
class declaration) when - pasting code into the standard Python interpreter. - - [1] http://docs.python.org/reference/lexical_analysis.html#blank-lines + r"""Trailing whitespace is superfluous. The warning returned varies on whether the line itself is blank, for easier filtering for those who want to indent their blank lines. @@ -180,30 +171,24 @@ def trailing_whitespace(physical_line): return 0, "W293 blank line contains whitespace" -def trailing_blank_lines(physical_line, lines, line_number): - r""" - JCR: Trailing blank lines are superfluous. +def trailing_blank_lines(physical_line, lines, line_number, total_lines): + r"""Trailing blank lines are superfluous. Okay: spam(1) W391: spam(1)\n - """ - if not physical_line.rstrip() and line_number == len(lines): - return 0, "W391 blank line at end of file" - - -def missing_newline(physical_line): - """ - JCR: The last line should have a newline. - Reports warning W292. + However the last line should end with a new line (warning W292). """ - if physical_line.rstrip() == physical_line: - return len(physical_line), "W292 no newline at end of file" + if line_number == total_lines: + stripped_last_line = physical_line.rstrip() + if not stripped_last_line: + return 0, "W391 blank line at end of file" + if stripped_last_line == physical_line: + return len(physical_line), "W292 no newline at end of file" def maximum_line_length(physical_line, max_line_length, multiline): - """ - Limit all lines to a maximum of 79 characters. + r"""Limit all lines to a maximum of 79 characters. There are still many devices around that are limited to 80 character lines; plus, limiting windows to 80 characters makes it possible to have @@ -241,9 +226,8 @@ def maximum_line_length(physical_line, max_line_length, multiline): def blank_lines(logical_line, blank_lines, indent_level, line_number, - previous_logical, previous_indent_level): - r""" - Separate top-level function and class definitions with two blank lines. 
+ blank_before, previous_logical, previous_indent_level): + r"""Separate top-level function and class definitions with two blank lines. Method definitions inside a class are separated by a single blank line. @@ -271,19 +255,18 @@ def blank_lines(logical_line, blank_lines, indent_level, line_number, yield 0, "E303 too many blank lines (%d)" % blank_lines elif logical_line.startswith(('def ', 'class ', '@')): if indent_level: - if not (blank_lines or previous_indent_level < indent_level or + if not (blank_before or previous_indent_level < indent_level or DOCSTRING_REGEX.match(previous_logical)): yield 0, "E301 expected 1 blank line, found 0" - elif blank_lines != 2: - yield 0, "E302 expected 2 blank lines, found %d" % blank_lines + elif blank_before != 2: + yield 0, "E302 expected 2 blank lines, found %d" % blank_before def extraneous_whitespace(logical_line): - """ - Avoid extraneous whitespace in the following situations: + r"""Avoid extraneous whitespace. + Avoid extraneous whitespace in these situations: - Immediately inside parentheses, brackets or braces. - - Immediately before a comma, semicolon, or colon. Okay: spam(ham[1], {eggs: 2}) @@ -312,8 +295,7 @@ def extraneous_whitespace(logical_line): def whitespace_around_keywords(logical_line): - r""" - Avoid extraneous whitespace around keywords. + r"""Avoid extraneous whitespace around keywords. Okay: True and False E271: True and False @@ -336,8 +318,7 @@ def whitespace_around_keywords(logical_line): def missing_whitespace(logical_line): - """ - JCR: Each comma, semicolon or colon should be followed by whitespace. + r"""Each comma, semicolon or colon should be followed by whitespace. Okay: [a, b] Okay: (3,) @@ -364,8 +345,7 @@ def missing_whitespace(logical_line): def indentation(logical_line, previous_logical, indent_char, indent_level, previous_indent_level): - r""" - Use 4 spaces per indentation level. + r"""Use 4 spaces per indentation level. 
For really old code that you don't want to mess up, you can continue to use 8-space tabs. @@ -391,15 +371,14 @@ def indentation(logical_line, previous_logical, indent_char, def continued_indentation(logical_line, tokens, indent_level, hang_closing, indent_char, noqa, verbose): - r""" - Continuation lines should align wrapped elements either vertically using - Python's implicit line joining inside parentheses, brackets and braces, or - using a hanging indent. + r"""Continuation lines indentation. - When using a hanging indent the following considerations should be applied: + Continuation lines should align wrapped elements either vertically + using Python's implicit line joining inside parentheses, brackets + and braces, or using a hanging indent. + When using a hanging indent these considerations should be applied: - there should be no arguments on the first line, and - - further indentation should be used to clearly distinguish itself as a continuation line. @@ -442,6 +421,7 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, # visual indents indent_chances = {} last_indent = tokens[0][2] + visual_indent = None # for each depth, memorize the visual indent column indent = [last_indent[1]] if verbose >= 3: @@ -452,8 +432,7 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, newline = row < start[0] - first_row if newline: row = start[0] - first_row - newline = (not last_token_multiline and - token_type not in (tokenize.NL, tokenize.NEWLINE)) + newline = not last_token_multiline and token_type not in NEWLINE if newline: # this is the beginning of a continuation line. 
@@ -575,24 +554,25 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, indent_chances[start[1]] = text last_token_multiline = (start[0] != end[0]) + if last_token_multiline: + rel_indent[end[0] - first_row] = rel_indent[row] if indent_next and expand_indent(line) == indent_level + 4: + pos = (start[0], indent[0] + 4) if visual_indent: code = "E129 visually indented line" else: code = "E125 continuation line" - yield (last_indent, "%s with same indent as next logical line" % code) + yield pos, "%s with same indent as next logical line" % code def whitespace_before_parameters(logical_line, tokens): - """ - Avoid extraneous whitespace in the following situations: + r"""Avoid extraneous whitespace. - - Immediately before the open parenthesis that starts the argument - list of a function call. - - - Immediately before the open parenthesis that starts an indexing or - slicing. + Avoid extraneous whitespace in the following situations: + - before the open parenthesis that starts the argument list of a + function call. + - before the open parenthesis that starts an indexing or slicing. Okay: spam(1) E211: spam (1) @@ -619,11 +599,7 @@ def whitespace_before_parameters(logical_line, tokens): def whitespace_around_operator(logical_line): - r""" - Avoid extraneous whitespace in the following situations: - - - More than one space around an assignment (or other) operator to - align it with another. + r"""Avoid extraneous whitespace around an operator. Okay: a = 12 + 3 E221: a = 4 + 5 @@ -646,13 +622,15 @@ def whitespace_around_operator(logical_line): def missing_whitespace_around_operator(logical_line, tokens): - r""" + r"""Surround operators with a single space on either side. 
+ - Always surround these binary operators with a single space on either side: assignment (=), augmented assignment (+=, -= etc.), - comparisons (==, <, >, !=, <>, <=, >=, in, not in, is, is not), + comparisons (==, <, >, !=, <=, >=, in, not in, is, is not), Booleans (and, or, not). - - Use spaces around arithmetic operators. + - If operators with different priorities are used, consider adding + whitespace around the operators with the lowest priorities. Okay: i = i + 1 Okay: submitted += 1 @@ -676,8 +654,7 @@ def missing_whitespace_around_operator(logical_line, tokens): prev_type = tokenize.OP prev_text = prev_end = None for token_type, text, start, end, line in tokens: - if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN): - # ERRORTOKEN is triggered by backticks in Python 3 + if token_type in SKIP_COMMENTS: continue if text in ('(', 'lambda'): parens += 1 @@ -717,14 +694,8 @@ def missing_whitespace_around_operator(logical_line, tokens): # Check if the operator is being used as a binary operator # Allow unary operators: -123, -x, +1. # Allow argument unpacking: foo(*args, **kwargs). - if prev_type == tokenize.OP: - binary_usage = (prev_text in '}])') - elif prev_type == tokenize.NAME: - binary_usage = (prev_text not in KEYWORDS) - else: - binary_usage = (prev_type not in SKIP_TOKENS) - - if binary_usage: + if (prev_text in '}])' if prev_type == tokenize.OP + else prev_text not in KEYWORDS): need_space = None elif text in WS_OPTIONAL_OPERATORS: need_space = None @@ -743,11 +714,7 @@ def missing_whitespace_around_operator(logical_line, tokens): def whitespace_around_comma(logical_line): - r""" - Avoid extraneous whitespace in the following situations: - - - More than one space around an assignment (or other) operator to - align it with another. + r"""Avoid extraneous whitespace after a comma or a colon. 
Note: these checks are disabled by default @@ -765,7 +732,8 @@ def whitespace_around_comma(logical_line): def whitespace_around_named_parameter_equals(logical_line, tokens): - """ + r"""Don't use spaces around the '=' sign in function arguments. + Don't use spaces around the '=' sign when used to indicate a keyword argument or a default parameter value. @@ -784,6 +752,8 @@ def whitespace_around_named_parameter_equals(logical_line, tokens): prev_end = None message = "E251 unexpected spaces around keyword / parameter equals" for token_type, text, start, end, line in tokens: + if token_type == tokenize.NL: + continue if no_space: no_space = False if start != prev_end: @@ -801,8 +771,7 @@ def whitespace_around_named_parameter_equals(logical_line, tokens): def whitespace_before_comment(logical_line, tokens): - """ - Separate inline comments by at least two spaces. + r"""Separate inline comments by at least two spaces. An inline comment is a comment on the same line as a statement. Inline comments should be separated by at least two spaces from the statement. @@ -840,8 +809,7 @@ def whitespace_before_comment(logical_line, tokens): def imports_on_separate_lines(logical_line): - r""" - Imports should usually be on separate lines. + r"""Imports should usually be on separate lines. Okay: import os\nimport sys E401: import sys, os @@ -860,13 +828,11 @@ def imports_on_separate_lines(logical_line): def compound_statements(logical_line): - r""" - Compound statements (multiple statements on the same line) are - generally discouraged. + r"""Compound statements (on the same line) are generally discouraged. While sometimes it's okay to put an if/for/while with a small body - on the same line, never do this for multi-clause statements. Also - avoid folding such long lines! + on the same line, never do this for multi-clause statements. + Also avoid folding such long lines! 
Okay: if foo == 'blah':\n do_blah_thing() Okay: do_one() @@ -906,8 +872,7 @@ def compound_statements(logical_line): def explicit_line_join(logical_line, tokens): - r""" - Avoid explicit line join between brackets. + r"""Avoid explicit line join between brackets. The preferred way of wrapping long lines is by using Python's implied line continuation inside parentheses, brackets and braces. Long lines can be @@ -941,7 +906,8 @@ def explicit_line_join(logical_line, tokens): def comparison_to_singleton(logical_line, noqa): - """ + r"""Comparison to singletons should use "is" or "is not". + Comparisons to singletons like None should always be done with "is" or "is not", never the equality operators. @@ -971,9 +937,7 @@ def comparison_to_singleton(logical_line, noqa): def comparison_negative(logical_line): - r""" - Negative comparison, either identity or membership, should be - done using "not in" and "is not". + r"""Negative comparison should be done using "not in" and "is not". Okay: if x not in y:\n pass Okay: assert (X in Y or X is Z) @@ -994,9 +958,9 @@ def comparison_negative(logical_line): def comparison_type(logical_line): - """ - Object type comparisons should always use isinstance() instead of - comparing types directly. + r"""Object type comparisons should always use isinstance(). + + Do not compare types directly. Okay: if isinstance(obj, int): E721: if type(obj) is type(1): @@ -1017,9 +981,7 @@ def comparison_type(logical_line): def python_3000_has_key(logical_line, noqa): - r""" - The {}.has_key() method is removed in the Python 3. - Use the 'in' operation instead. + r"""The {}.has_key() method is removed in Python 3: use the 'in' operator. Okay: if "alph" in d:\n print d["alph"] W601: assert d.has_key('alph') @@ -1030,14 +992,9 @@ def python_3000_has_key(logical_line, noqa): def python_3000_raise_comma(logical_line): - """ - When raising an exception, use "raise ValueError('message')" - instead of the older form "raise ValueError, 'message'". 
+ r"""When raising an exception, use "raise ValueError('message')". - The paren-using form is preferred because when the exception arguments - are long or include string formatting, you don't need to use line - continuation characters thanks to the containing parentheses. The older - form is removed in Python 3. + The older form is removed in Python 3. Okay: raise DummyError("Message") W602: raise DummyError, "Message" @@ -1048,9 +1005,8 @@ def python_3000_raise_comma(logical_line): def python_3000_not_equal(logical_line): - """ - != can also be written <>, but this is an obsolete usage kept for - backwards compatibility only. New code should always use !=. + r"""New code should always use != instead of <>. + The older syntax is removed in Python 3. Okay: if a != 'no': @@ -1062,9 +1018,7 @@ def python_3000_not_equal(logical_line): def python_3000_backticks(logical_line): - """ - Backticks are removed in Python 3. - Use repr() instead. + r"""Backticks are removed in Python 3: use repr() instead. Okay: val = repr(1 + 2) W604: val = `1 + 2` @@ -1082,47 +1036,40 @@ def python_3000_backticks(logical_line): if '' == ''.encode(): # Python 2: implicit encoding. 
def readlines(filename): - f = open(filename) - try: + """Read the source code.""" + with open(filename) as f: return f.readlines() - finally: - f.close() isidentifier = re.compile(r'[a-zA-Z_]\w*').match stdin_get_value = sys.stdin.read else: # Python 3 def readlines(filename): - f = open(filename, 'rb') + """Read the source code.""" try: - coding, lines = tokenize.detect_encoding(f.readline) - f = TextIOWrapper(f, coding, line_buffering=True) - return [l.decode(coding) for l in lines] + f.readlines() + with open(filename, 'rb') as f: + (coding, lines) = tokenize.detect_encoding(f.readline) + f = TextIOWrapper(f, coding, line_buffering=True) + return [l.decode(coding) for l in lines] + f.readlines() except (LookupError, SyntaxError, UnicodeError): - f.close() - # Fall back if files are improperly declared - f = open(filename, encoding='latin-1') - return f.readlines() - finally: - f.close() + # Fall back if file encoding is improperly declared + with open(filename, encoding='latin-1') as f: + return f.readlines() isidentifier = str.isidentifier def stdin_get_value(): return TextIOWrapper(sys.stdin.buffer, errors='ignore').read() -readlines.__doc__ = " Read the source code." noqa = re.compile(r'# no(?:qa|pep8)\b', re.I).search def expand_indent(line): - r""" - Return the amount of indentation. + r"""Return the amount of indentation. + Tabs are expanded to the next multiple of 8. >>> expand_indent(' ') 4 >>> expand_indent('\t') 8 - >>> expand_indent(' \t') - 8 >>> expand_indent(' \t') 8 >>> expand_indent(' \t') @@ -1142,8 +1089,7 @@ def expand_indent(line): def mute_string(text): - """ - Replace contents with 'xxx' to prevent syntax matching. + """Replace contents with 'xxx' to prevent syntax matching. 
>>> mute_string('"abc"') '"xxx"' @@ -1175,7 +1121,7 @@ def parse_udiff(diff, patterns=None, parent='.'): continue if line[:3] == '@@ ': hunk_match = HUNK_REGEX.match(line) - row, nrows = [int(g or '1') for g in hunk_match.groups()] + (row, nrows) = [int(g or '1') for g in hunk_match.groups()] rv[path].update(range(row, row + nrows)) elif line[:3] == '+++': path = line[4:].split('\t', 1)[0] @@ -1203,8 +1149,8 @@ def normalize_paths(value, parent=os.curdir): def filename_match(filename, patterns, default=True): - """ - Check if patterns contains a pattern that matches filename. + """Check if patterns contains a pattern that matches filename. + If patterns is unspecified, this always returns True. """ if not patterns: @@ -1212,6 +1158,15 @@ def filename_match(filename, patterns, default=True): return any(fnmatch(filename, pattern) for pattern in patterns) +if COMMENT_WITH_NL: + def _is_eol_token(token): + return (token[0] in NEWLINE or + (token[0] == tokenize.COMMENT and token[1] == token[4])) +else: + def _is_eol_token(token): + return token[0] in NEWLINE + + ############################################################################## # Framework to run all checks ############################################################################## @@ -1221,9 +1176,7 @@ def filename_match(filename, patterns, default=True): def register_check(check, codes=None): - """ - Register a new check object. - """ + """Register a new check object.""" def _add_check(check, kind, codes, args): if check in _checks[kind]: _checks[kind][check][0].extend(codes or []) @@ -1241,9 +1194,9 @@ def _add_check(check, kind, codes, args): def init_checks_registry(): - """ - Register all globally visible functions where the first argument name - is 'physical_line' or 'logical_line'. + """Register all globally visible functions. + + The first argument name is either 'physical_line' or 'logical_line'. 
""" mod = inspect.getmodule(register_check) for (name, function) in inspect.getmembers(mod, inspect.isfunction): @@ -1252,9 +1205,7 @@ def init_checks_registry(): class Checker(object): - """ - Load a Python source file, tokenize it, check coding style. - """ + """Load a Python source file, tokenize it, check coding style.""" def __init__(self, filename=None, lines=None, options=None, report=None, **kwargs): @@ -1297,6 +1248,7 @@ def __init__(self, filename=None, lines=None, self.report_error = self.report.error def report_invalid_syntax(self): + """Check if the syntax is valid.""" (exc_type, exc) = sys.exc_info()[:2] if len(exc.args) > 1: offset = exc.args[1] @@ -1307,33 +1259,26 @@ def report_invalid_syntax(self): self.report_error(offset[0], offset[1] or 0, 'E901 %s: %s' % (exc_type.__name__, exc.args[0]), self.report_invalid_syntax) - report_invalid_syntax.__doc__ = " Check if the syntax is valid." def readline(self): - """ - Get the next line from the input buffer. - """ - self.line_number += 1 - if self.line_number > len(self.lines): + """Get the next line from the input buffer.""" + if self.line_number >= self.total_lines: return '' - line = self.lines[self.line_number - 1] + line = self.lines[self.line_number] + self.line_number += 1 if self.indent_char is None and line[:1] in WHITESPACE: self.indent_char = line[0] return line def run_check(self, check, argument_names): - """ - Run a check plugin. - """ + """Run a check plugin.""" arguments = [] for name in argument_names: arguments.append(getattr(self, name)) return check(*arguments) def check_physical(self, line): - """ - Run all physical checks on a raw input line. - """ + """Run all physical checks on a raw input line.""" self.physical_line = line for name, check, argument_names in self._physical_checks: result = self.run_check(check, argument_names) @@ -1344,78 +1289,67 @@ def check_physical(self, line): self.indent_char = line[0] def build_tokens_line(self): - """ - Build a logical line from tokens. 
- """ - self.mapping = [] + """Build a logical line from tokens.""" logical = [] comments = [] length = 0 - previous = None - for token in self.tokens: - (token_type, text) = token[0:2] + prev_row = prev_col = mapping = None + for token_type, text, start, end, line in self.tokens: + if token_type in SKIP_TOKENS: + continue + if not mapping: + mapping = [(0, start)] if token_type == tokenize.COMMENT: comments.append(text) continue - if token_type in SKIP_TOKENS: - continue if token_type == tokenize.STRING: text = mute_string(text) - if previous: - (end_row, end) = previous[3] - (start_row, start) = token[2] - if end_row != start_row: # different row - prev_text = self.lines[end_row - 1][end - 1] + if prev_row: + (start_row, start_col) = start + if prev_row != start_row: # different row + prev_text = self.lines[prev_row - 1][prev_col - 1] if prev_text == ',' or (prev_text not in '{[(' and text not in '}])'): - logical.append(' ') - length += 1 - elif end != start: # different column - fill = self.lines[end_row - 1][end:start] - logical.append(fill) - length += len(fill) - self.mapping.append((length, token)) + text = ' ' + text + elif prev_col != start_col: # different column + text = line[prev_col:start_col] + text logical.append(text) length += len(text) - previous = token + mapping.append((length, end)) + (prev_row, prev_col) = end self.logical_line = ''.join(logical) self.noqa = comments and noqa(''.join(comments)) - # With Python 2, if the line ends with '\r\r\n' the assertion fails - # assert self.logical_line.strip() == self.logical_line + return mapping def check_logical(self): - """ - Build a line from tokens and run all logical checks on it. 
- """ - self.build_tokens_line() + """Build a line from tokens and run all logical checks on it.""" self.report.increment_logical_line() - token0 = self.mapping[0][1] if self.mapping else self.tokens[0] - first_line = self.lines[token0[2][0] - 1] - indent = first_line[:token0[2][1]] - self.indent_level = expand_indent(indent) + mapping = self.build_tokens_line() + (start_row, start_col) = mapping[0][1] + start_line = self.lines[start_row - 1] + self.indent_level = expand_indent(start_line[:start_col]) + if self.blank_before < self.blank_lines: + self.blank_before = self.blank_lines if self.verbose >= 2: print(self.logical_line[:80].rstrip()) for name, check, argument_names in self._logical_checks: if self.verbose >= 4: print(' ' + name) - for result in self.run_check(check, argument_names) or (): - (offset, text) = result - if isinstance(offset, tuple): - (orig_number, orig_offset) = offset - else: - orig_number = token0[2][0] - orig_offset = token0[2][1] + offset - for token_offset, token in self.mapping: - if offset >= token_offset: - orig_number = token[2][0] - orig_offset = (token[2][1] + offset - token_offset) - self.report_error(orig_number, orig_offset, text, check) + for offset, text in self.run_check(check, argument_names) or (): + if not isinstance(offset, tuple): + for token_offset, pos in mapping: + if offset <= token_offset: + break + offset = (pos[0], pos[1] + offset - token_offset) + self.report_error(offset[0], offset[1], text, check) if self.logical_line: self.previous_indent_level = self.indent_level self.previous_logical = self.logical_line + self.blank_lines = 0 self.tokens = [] def check_ast(self): + """Build the file's AST and run all AST checks.""" try: tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST) except (SyntaxError, TypeError): @@ -1427,6 +1361,7 @@ def check_ast(self): self.report_error(lineno, offset, text, check) def generate_tokens(self): + """Tokenize the file, run physical line checks and yield tokens.""" if 
self._io_error: self.report_error(1, 0, 'E902 %s' % self._io_error, readlines) tokengen = tokenize.generate_tokens(self.readline) @@ -1438,11 +1373,9 @@ def generate_tokens(self): self.report_invalid_syntax() def maybe_check_physical(self, token): - """ - If appropriate (based on token), check current physical line(s). - """ + """If appropriate (based on token), check current physical line(s).""" # Called after every token, but act only on end of line. - if token[0] in (tokenize.NEWLINE, tokenize.NL): + if _is_eol_token(token): # Obviously, a newline token ends a single physical line. self.check_physical(token[4]) elif token[0] == tokenize.STRING and '\n' in token[1]: @@ -1470,19 +1403,17 @@ def maybe_check_physical(self, token): self.multiline = False def check_all(self, expected=None, line_offset=0): - """ - Run all checks on the input file. - """ + """Run all checks on the input file.""" self.report.init_file(self.filename, self.lines, expected, line_offset) + self.total_lines = len(self.lines) if self._ast_checks: self.check_ast() self.line_number = 0 self.indent_char = None - self.indent_level = 0 - self.previous_indent_level = 0 + self.indent_level = self.previous_indent_level = 0 self.previous_logical = '' self.tokens = [] - self.blank_lines = blank_lines_before_comment = 0 + self.blank_lines = self.blank_before = 0 parens = 0 for token in self.generate_tokens(): self.tokens.append(token) @@ -1500,32 +1431,35 @@ def check_all(self, expected=None, line_offset=0): elif text in '}])': parens -= 1 elif not parens: - if token_type == tokenize.NEWLINE: - if self.blank_lines < blank_lines_before_comment: - self.blank_lines = blank_lines_before_comment - self.check_logical() - self.blank_lines = blank_lines_before_comment = 0 - elif token_type == tokenize.NL: - if len(self.tokens) == 1: + if token_type in NEWLINE: + if token_type == tokenize.NEWLINE: + self.check_logical() + self.blank_before = 0 + elif len(self.tokens) == 1: # The physical line contains only this 
token. self.blank_lines += 1 del self.tokens[0] else: self.check_logical() - elif token_type == tokenize.COMMENT and len(self.tokens) == 1: - if blank_lines_before_comment < self.blank_lines: - blank_lines_before_comment = self.blank_lines - self.blank_lines = 0 - if COMMENT_WITH_NL: + elif COMMENT_WITH_NL and token_type == tokenize.COMMENT: + if len(self.tokens) == 1: # The comment also ends a physical line - text = text.rstrip('\r\n') - self.tokens = [(token_type, text) + token[2:]] + token = list(token) + token[1] = text.rstrip('\r\n') + token[3] = (token[2][0], token[2][1] + len(token[1])) + self.tokens = [tuple(token)] self.check_logical() + if len(self.tokens) > 1 and (token_type == tokenize.ENDMARKER and + self.tokens[-2][0] not in SKIP_TOKENS): + self.tokens.pop() + self.check_physical(self.tokens[-1][4]) + self.check_logical() return self.report.get_file_results() class BaseReport(object): """Collect the results of the checks.""" + print_filename = False def __init__(self, options): @@ -1588,8 +1522,7 @@ def get_count(self, prefix=''): for key in self.messages if key.startswith(prefix)]) def get_statistics(self, prefix=''): - """ - Get statistics for message codes that start with the prefix. + """Get statistics for message codes that start with the prefix. prefix='' matches all errors and warnings prefix='E' matches all errors @@ -1660,9 +1593,9 @@ def get_file_results(self): else: line = self.lines[line_number - 1] print(line.rstrip()) - print(' ' * offset + '^') + print(re.sub(r'\S', ' ', line[:offset]) + '^') if self._show_pep8 and doc: - print(doc.lstrip('\n').rstrip()) + print(' ' + doc.strip()) return self.file_errors @@ -1773,8 +1706,9 @@ def input_dir(self, dirname): runner(os.path.join(root, filename)) def excluded(self, filename, parent=None): - """ - Check if options.exclude contains a pattern that matches filename. + """Check if the file should be excluded. + + Check if 'options.exclude' contains a pattern that matches filename. 
""" if not self.options.exclude: return False @@ -1787,8 +1721,7 @@ def excluded(self, filename, parent=None): return filename_match(filename, self.options.exclude) def ignore_code(self, code): - """ - Check if the error code should be ignored. + """Check if the error code should be ignored. If 'options.select' contains a prefix of the error code, return False. Else, if 'options.ignore' contains a prefix of @@ -1801,7 +1734,8 @@ def ignore_code(self, code): not code.startswith(self.options.select)) def get_checks(self, argument_name): - """ + """Get all the checks for this category. + Find all globally visible functions where the first argument name starts with argument_name and which contain selected tests. """ @@ -1903,12 +1837,11 @@ def read_config(options, args, arglist, parser): # Second, parse the configuration for opt in config.options(pep8_section): + if opt.replace('_', '-') not in parser.config_options: + print(" unknown option '%s' ignored" % opt) + continue if options.verbose > 1: print(" %s = %s" % (opt, config.get(pep8_section, opt))) - if opt.replace('_', '-') not in parser.config_options: - print("Unknown option: '%s'\n not in [%s]" % - (opt, ' '.join(parser.config_options))) - sys.exit(1) normalized_opt = opt.replace('-', '_') opt_type = option_list[normalized_opt] if opt_type in ('int', 'count'): @@ -1979,6 +1912,14 @@ def process_options(arglist=None, parse_argv=False, config_file=None, def _main(): """Parse options and run checks on Python source.""" + import signal + + # Handle "Broken pipe" gracefully + try: + signal.signal(signal.SIGPIPE, lambda signum, frame: sys.exit(1)) + except AttributeError: + pass # not supported on Windows + pep8style = StyleGuide(parse_argv=True, config_file=True) options = pep8style.options if options.doctest or options.testsuite: diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py index 53bc0721..cb2b136b 100644 --- 
a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py @@ -1,2 +1,2 @@ -__version__ = '0.8' +__version__ = '0.8.2a0' diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py index e756ff42..70558324 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py @@ -15,7 +15,6 @@ try: import ast - iter_child_nodes = ast.iter_child_nodes except ImportError: # Python 2.5 import _ast as ast @@ -24,26 +23,6 @@ ast.ClassDef.decorator_list = () ast.FunctionDef.decorator_list = property(lambda s: s.decorators) - def iter_child_nodes(node): - """ - Yield all direct child nodes of *node*, that is, all fields that - are nodes and all items of fields that are lists of nodes. - """ - for name in node._fields: - field = getattr(node, name, None) - if isinstance(field, ast.AST): - yield field - elif isinstance(field, list): - for item in field: - yield item -# Python >= 3.3 uses ast.Try instead of (ast.TryExcept + ast.TryFinally) -if hasattr(ast, 'Try'): - ast_TryExcept = ast.Try - ast_TryFinally = () -else: - ast_TryExcept = ast.TryExcept - ast_TryFinally = ast.TryFinally - from pyflakes import messages @@ -55,6 +34,55 @@ def getNodeType(node_class): def getNodeType(node_class): return node_class.__name__.upper() +# Python >= 3.3 uses ast.Try instead of (ast.TryExcept + ast.TryFinally) +if PY32: + def getAlternatives(n): + if isinstance(n, (ast.If, ast.TryFinally)): + return [n.body] + if isinstance(n, ast.TryExcept): + return [n.body + n.orelse] + [[hdl] for hdl in n.handlers] +else: + def getAlternatives(n): + if isinstance(n, ast.If): + return [n.body] + if isinstance(n, ast.Try): + return [n.body + n.orelse] + [[hdl] for hdl in n.handlers] + + +class _FieldsOrder(dict): + """Fix order of AST node fields.""" + + def _get_fields(self, node_class): + # 
handle iter before target, and generators before element + fields = node_class._fields + if 'iter' in fields: + key_first = 'iter'.find + elif 'generators' in fields: + key_first = 'generators'.find + else: + key_first = 'value'.find + return tuple(sorted(fields, key=key_first, reverse=True)) + + def __missing__(self, node_class): + self[node_class] = fields = self._get_fields(node_class) + return fields + + +def iter_child_nodes(node, omit=None, _fields_order=_FieldsOrder()): + """ + Yield all direct child nodes of *node*, that is, all fields that + are nodes and all items of fields that are lists of nodes. + """ + for name in _fields_order[node.__class__]: + if name == omit: + continue + field = getattr(node, name, None) + if isinstance(field, ast.AST): + yield field + elif isinstance(field, list): + for item in field: + yield item + class Binding(object): """ @@ -82,8 +110,17 @@ def __repr__(self): self.source.lineno, id(self)) + def redefines(self, other): + return isinstance(other, Definition) and self.name == other.name + -class Importation(Binding): +class Definition(Binding): + """ + A binding that defines a function or a class. + """ + + +class Importation(Definition): """ A binding created by an import statement. @@ -91,11 +128,18 @@ class Importation(Binding): possibly including multiple dotted components. @type fullName: C{str} """ + def __init__(self, name, source): self.fullName = name + self.redefined = [] name = name.split('.')[0] super(Importation, self).__init__(name, source) + def redefines(self, other): + if isinstance(other, Importation): + return self.fullName == other.fullName + return isinstance(other, Definition) and self.name == other.name + class Argument(Binding): """ @@ -103,12 +147,6 @@ class Argument(Binding): """ -class Definition(Binding): - """ - A binding that defines a function or a class. - """ - - class Assignment(Binding): """ Represents binding a name with an explicit assignment. 
@@ -141,6 +179,7 @@ class ExportBinding(Binding): Names which are imported and not otherwise used but appear in the value of C{__all__} will not have an unused import warning reported for them. """ + def __init__(self, name, source, scope): if '__all__' in scope and isinstance(source, ast.AugAssign): self.names = list(scope['__all__'].names) @@ -305,8 +344,7 @@ def checkDeadScopes(self): which were imported but unused. """ for scope in self.deadScopes: - export = isinstance(scope.get('__all__'), ExportBinding) - if export: + if isinstance(scope.get('__all__'), ExportBinding): all_names = set(scope['__all__'].names) if not scope.importStarred and \ os.path.basename(self.filename) != '__init__.py': @@ -319,12 +357,20 @@ def checkDeadScopes(self): all_names = [] # Look for imported names that aren't used. - for importation in scope.values(): - if (isinstance(importation, Importation) and - not importation.used and - importation.name not in all_names): - self.report(messages.UnusedImport, - importation.source, importation.name) + for value in scope.values(): + if isinstance(value, Importation): + used = value.used or value.name in all_names + if not used: + messg = messages.UnusedImport + self.report(messg, value.source, value.name) + for node in value.redefined: + if isinstance(self.getParent(node), ast.For): + messg = messages.ImportShadowedByLoopVar + elif used: + continue + else: + messg = messages.RedefinedWhileUnused + self.report(messg, node, value.name, value.source) def pushScope(self, scopeClass=FunctionScope): self.scopeStack.append(scopeClass()) @@ -332,94 +378,77 @@ def pushScope(self, scopeClass=FunctionScope): def report(self, messageClass, *args, **kwargs): self.messages.append(messageClass(self.filename, *args, **kwargs)) - def hasParent(self, node, kind): - while hasattr(node, 'parent'): + def getParent(self, node): + # Lookup the first parent which is not Tuple, List or Starred + while True: node = node.parent - if isinstance(node, kind): - return 
True + if not hasattr(node, 'elts') and not hasattr(node, 'ctx'): + return node - def getCommonAncestor(self, lnode, rnode, stop=None): - if not stop: - stop = self.root + def getCommonAncestor(self, lnode, rnode, stop): + if stop in (lnode, rnode) or not (hasattr(lnode, 'parent') and + hasattr(rnode, 'parent')): + return None if lnode is rnode: return lnode - if stop in (lnode, rnode): - return stop - if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): - return if (lnode.depth > rnode.depth): return self.getCommonAncestor(lnode.parent, rnode, stop) - if (rnode.depth > lnode.depth): + if (lnode.depth < rnode.depth): return self.getCommonAncestor(lnode, rnode.parent, stop) return self.getCommonAncestor(lnode.parent, rnode.parent, stop) - def descendantOf(self, node, ancestors, stop=None): + def descendantOf(self, node, ancestors, stop): for a in ancestors: - if self.getCommonAncestor(node, a, stop) not in (stop, None): + if self.getCommonAncestor(node, a, stop): return True return False - def onFork(self, parent, lnode, rnode, items): - return (self.descendantOf(lnode, items, parent) ^ - self.descendantOf(rnode, items, parent)) - def differentForks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY""" - ancestor = self.getCommonAncestor(lnode, rnode) - if isinstance(ancestor, ast.If): - for fork in (ancestor.body, ancestor.orelse): - if self.onFork(ancestor, lnode, rnode, fork): + ancestor = self.getCommonAncestor(lnode, rnode, self.root) + parts = getAlternatives(ancestor) + if parts: + for items in parts: + if self.descendantOf(lnode, items, ancestor) ^ \ + self.descendantOf(rnode, items, ancestor): return True - elif isinstance(ancestor, ast_TryExcept): - body = ancestor.body + ancestor.orelse - for fork in [body] + [[hdl] for hdl in ancestor.handlers]: - if self.onFork(ancestor, lnode, rnode, fork): - return True - elif isinstance(ancestor, ast_TryFinally): - if self.onFork(ancestor, lnode, rnode, 
ancestor.body): - return True return False - def addBinding(self, node, value, reportRedef=True): + def addBinding(self, node, value): """ Called when a binding is altered. - `node` is the statement responsible for the change - - `value` is the optional new value, a Binding instance, associated - with the binding; if None, the binding is deleted if it exists. - - if `reportRedef` is True (default), rebinding while unused will be - reported. + - `value` is the new value, a Binding instance """ - redefinedWhileUnused = False - if not isinstance(self.scope, ClassScope): - for scope in self.scopeStack[::-1]: - existing = scope.get(value.name) - if (isinstance(existing, Importation) - and not existing.used - and (not isinstance(value, Importation) or - value.fullName == existing.fullName) - and reportRedef - and not self.differentForks(node, existing.source)): - redefinedWhileUnused = True + # assert value.source in (node, node.parent): + for scope in self.scopeStack[::-1]: + if value.name in scope: + break + existing = scope.get(value.name) + + if existing and not self.differentForks(node, existing.source): + + parent_stmt = self.getParent(value.source) + if isinstance(existing, Importation) and isinstance(parent_stmt, ast.For): + self.report(messages.ImportShadowedByLoopVar, + node, value.name, existing.source) + + elif scope is self.scope: + if (isinstance(parent_stmt, ast.comprehension) and + not isinstance(self.getParent(existing.source), + (ast.For, ast.comprehension))): + self.report(messages.RedefinedInListComp, + node, value.name, existing.source) + elif not existing.used and value.redefines(existing): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) - existing = self.scope.get(value.name) - if not redefinedWhileUnused and self.hasParent(value.source, ast.ListComp): - if (existing and reportRedef - and not self.hasParent(existing.source, (ast.For, ast.ListComp)) - and not self.differentForks(node, existing.source)): - 
self.report(messages.RedefinedInListComp, - node, value.name, existing.source) + elif isinstance(existing, Importation) and value.redefines(existing): + existing.redefined.append(node) - if (isinstance(existing, Definition) - and not existing.used - and not self.differentForks(node, existing.source)): - self.report(messages.RedefinedWhileUnused, - node, value.name, existing.source) - else: - self.scope[value.name] = value + self.scope[value.name] = value def getNodeHandler(self, node_class): try: @@ -488,12 +517,13 @@ def handleNodeStore(self, node): scope[name].used[1], name, scope[name].source) break - parent = getattr(node, 'parent', None) - if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): + parent_stmt = self.getParent(node) + if isinstance(parent_stmt, (ast.For, ast.comprehension)) or ( + parent_stmt != node.parent and + not self.isLiteralTupleUnpacking(parent_stmt)): binding = Binding(name, node) - elif (parent is not None and name == '__all__' and - isinstance(self.scope, ModuleScope)): - binding = ExportBinding(name, parent, self.scope) + elif name == '__all__' and isinstance(self.scope, ModuleScope): + binding = ExportBinding(name, node.parent, self.scope) else: binding = Assignment(name, node) if name in self.scope: @@ -512,10 +542,17 @@ def handleNodeDelete(self, node): except KeyError: self.report(messages.UndefinedName, node, name) - def handleChildren(self, tree): - for node in iter_child_nodes(tree): + def handleChildren(self, tree, omit=None): + for node in iter_child_nodes(tree, omit=omit): self.handleNode(node, tree) + def isLiteralTupleUnpacking(self, node): + if isinstance(node, ast.Assign): + for child in node.targets + [node.value]: + if not hasattr(child, 'elts'): + return False + return True + def isDocstring(self, node): """ Determine if the given node is a docstring, as long as it is at the @@ -559,16 +596,19 @@ def handleNode(self, node, parent): def handleDoctests(self, node): try: - docstring, node_lineno = 
self.getDocstring(node.body[0]) - if not docstring: - return - examples = self._getDoctestExamples(docstring) + (docstring, node_lineno) = self.getDocstring(node.body[0]) + examples = docstring and self._getDoctestExamples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for has inconsistent # leading whitespace: ... return + if not examples: + return node_offset = self.offset or (0, 0) self.pushScope() + underscore_in_builtins = '_' in self.builtIns + if not underscore_in_builtins: + self.builtIns.add('_') for example in examples: try: tree = compile(example.source, "", "exec", ast.PyCF_ONLY_AST) @@ -580,21 +620,18 @@ def handleDoctests(self, node): else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) - underscore_in_builtins = '_' in self.builtIns - if not underscore_in_builtins: - self.builtIns.add('_') self.handleChildren(tree) - if not underscore_in_builtins: - self.builtIns.remove('_') self.offset = node_offset + if not underscore_in_builtins: + self.builtIns.remove('_') self.popScope() def ignore(self, node): pass # "stmt" type nodes - DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = \ - TRYFINALLY = ASSERT = EXEC = EXPR = handleChildren + DELETE = PRINT = FOR = WHILE = IF = WITH = WITHITEM = RAISE = \ + TRYFINALLY = ASSERT = EXEC = EXPR = ASSIGN = handleChildren CONTINUE = BREAK = PASS = ignore @@ -617,7 +654,7 @@ def ignore(self, node): EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types - COMPREHENSION = KEYWORD = handleChildren + LISTCOMP = COMPREHENSION = KEYWORD = handleChildren def GLOBAL(self, node): """ @@ -628,54 +665,12 @@ def GLOBAL(self, node): NONLOCAL = GLOBAL - def LISTCOMP(self, node): - # handle generators before element - for gen in node.generators: - self.handleNode(gen, node) - self.handleNode(node.elt, node) - def GENERATOREXP(self, node): self.pushScope(GeneratorScope) - # handle generators before element - for 
gen in node.generators: - self.handleNode(gen, node) - self.handleNode(node.elt, node) - self.popScope() - - SETCOMP = GENERATOREXP - - def DICTCOMP(self, node): - self.pushScope(GeneratorScope) - for gen in node.generators: - self.handleNode(gen, node) - self.handleNode(node.key, node) - self.handleNode(node.value, node) + self.handleChildren(node) self.popScope() - def FOR(self, node): - """ - Process bindings for loop variables. - """ - vars = [] - - def collectLoopVars(n): - if isinstance(n, ast.Name): - vars.append(n.id) - elif isinstance(n, ast.expr_context): - return - else: - for c in iter_child_nodes(n): - collectLoopVars(c) - - collectLoopVars(node.target) - for varn in vars: - if (isinstance(self.scope.get(varn), Importation) - # unused ones will get an unused import warning - and self.scope[varn].used): - self.report(messages.ImportShadowedByLoopVar, - node, varn, self.scope[varn].source) - - self.handleChildren(node) + DICTCOMP = SETCOMP = GENERATOREXP def NAME(self, node): """ @@ -698,7 +693,11 @@ def NAME(self, node): raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def RETURN(self, node): - if node.value and not self.scope.returnValue: + if ( + node.value and + hasattr(self.scope, 'returnValue') and + not self.scope.returnValue + ): self.scope.returnValue = node.value self.handleNode(node.value, node) @@ -711,8 +710,8 @@ def YIELD(self, node): def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) - self.addBinding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) + self.addBinding(node, FunctionDefinition(node.name, node)) if self.withDoctest: self.deferFunction(lambda: self.handleDoctests(node)) @@ -766,7 +765,7 @@ def runFunction(): self.pushScope() for name in args: - self.addBinding(node, Argument(name, node), reportRedef=False) + self.addBinding(node, Argument(name, node)) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: @@ -818,11 +817,6 @@ 
def CLASSDEF(self, node): self.popScope() self.addBinding(node, ClassDefinition(node.name, node)) - def ASSIGN(self, node): - self.handleNode(node.value, node) - for target in node.targets: - self.handleNode(target, node) - def AUGASSIGN(self, node): self.handleNodeLoad(node.target) self.handleNode(node.value, node) @@ -868,9 +862,7 @@ def TRY(self, node): self.handleNode(child, node) self.exceptHandlers.pop() # Process the other nodes: "except:", "else:", "finally:" - for child in iter_child_nodes(node): - if child not in node.body: - self.handleNode(child, node) + self.handleChildren(node, omit='body') TRYEXCEPT = TRY diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py index 553b1a69..6c12b39e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -4,16 +4,14 @@ # ================== -__version__ = '0.2.0' -__project__ = 'pylama_pylint' +__version__ = "0.2.1" +__project__ = "pylama_pylint" __author__ = "horneds " __license__ = "BSD" import sys - if sys.version_info >= (3, 0, 0): raise ImportError("pylama_pylint doesnt support python3") - from .main import Linter assert Linter diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py index 1e59829b..85398ff1 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py @@ -21,7 +21,7 @@ modname = 'astroid' -numversion = (1, 0, 1) +numversion = (1, 1, 1) version = '.'.join([str(num) for num in numversion]) install_requires = ['logilab-common >= 0.60.0'] diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py index d8b1b8cb..5ee11b3b 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py @@ 
-171,6 +171,9 @@ def getattr(self, name, context=None, lookupclass=True): def igetattr(self, name, context=None): """inferred getattr""" try: + # avoid recursively inferring the same attr on the same class + if context: + context.push((self._proxied, name)) # XXX frame should be self._proxied, or not ? get_attr = self.getattr(name, context, lookupclass=False) return _infer_stmts(self._wrap_attr(get_attr, context), context, @@ -200,6 +203,8 @@ def infer_call_result(self, caller, context=None): """infer what a class instance is returning when called""" infered = False for node in self._proxied.igetattr('__call__', context): + if node is YES: + continue for res in node.infer_call_result(caller, context): infered = True yield res @@ -254,7 +259,8 @@ def infer_call_result(self, caller, context): # instance of the class given as first argument. if (self._proxied.name == '__new__' and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS): - return (x is YES and x or Instance(x) for x in caller.args[0].infer()) + return ((x is YES and x or Instance(x)) + for x in caller.args[0].infer()) return self._proxied.infer_call_result(caller, context) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py index 5001b7cb..dd9868db 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py @@ -5,6 +5,7 @@ import inspect import sys +import re from astroid import MANAGER, AstroidBuildingException from astroid.builder import AstroidBuilder @@ -12,6 +13,7 @@ _inspected_modules = {} +_identifier_re = r'^[A-Za-z_]\w*$' def _gi_build_stub(parent): """ @@ -23,9 +25,13 @@ def _gi_build_stub(parent): constants = {} methods = {} for name in dir(parent): - if not name or name.startswith("__"): - # GLib.IConv has a parameter named "" :/ + if name.startswith("__"): continue + + # Check if this is a valid name in python + if not 
re.match(_identifier_re, name): + continue + try: obj = getattr(parent, name) except: @@ -46,6 +52,12 @@ def _gi_build_stub(parent): str(obj).startswith(" (3, 0) # module specific transformation functions ##################################### @@ -67,6 +70,7 @@ def popleft(self): pass def remove(self, value): pass def reverse(self): pass def rotate(self, n): pass + def __iter__(self): return self ''') @@ -142,6 +146,10 @@ def geturl(self): module.locals[func_name] = func def subprocess_transform(module): + if PY3K: + communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) + else: + communicate = ('string', 'string') fake = AstroidBuilder(MANAGER).string_build(''' class Popen(object): @@ -156,7 +164,7 @@ def __init__(self, args, bufsize=0, executable=None, pass def communicate(self, input=None): - return ('string', 'string') + return %r def wait(self): return self.returncode def poll(self): @@ -167,7 +175,7 @@ def terminate(self): pass def kill(self): pass - ''') + ''' % (communicate, )) for func_name, func in fake.locals.items(): module.locals[func_name] = func @@ -220,6 +228,23 @@ def infer_first(node): fake_node = nodes.EmptyNode() fake_node.parent = class_node class_node.instance_attrs[attr] = [fake_node] + + fake = AstroidBuilder(MANAGER).string_build(''' +class %(name)s(tuple): + def _asdict(self): + return self.__dict__ + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + return new(cls, iterable) + def _replace(_self, **kwds): + result = _self._make(map(kwds.pop, %(fields)r, _self)) + if kwds: + raise ValueError('Got unexpected field names: %%r' %% list(kwds)) + return result + ''' % {'name': name, 'fields': attributes}) + class_node.locals['_asdict'] = fake.body[0].locals['_asdict'] + class_node.locals['_make'] = fake.body[0].locals['_make'] + class_node.locals['_replace'] = fake.body[0].locals['_replace'] # we use UseInferenceDefault, we can't be a generator so return an iterator return iter([class_node]) diff --git 
a/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py index b088b205..b6ceff82 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py @@ -131,16 +131,26 @@ def file_build(self, path, modname=None): except ImportError: modname = splitext(basename(path))[0] # build astroid representation - node = self.string_build(data, modname, path) - node.file_encoding = encoding - return node + module = self._data_build(data, modname, path) + return self._post_build(module, encoding) def string_build(self, data, modname='', path=None): """build astroid from source code string and return rebuilded astroid""" module = self._data_build(data, modname, path) - self._manager.astroid_cache[module.name] = module + module.file_bytes = data.encode('utf-8') + return self._post_build(module, 'utf-8') + + def _post_build(self, module, encoding): + """handles encoding and delayed nodes + after a module has been built + """ + module.file_encoding = encoding + self._manager.cache_module(module) # post tree building steps after we stored the module in the cache: for from_node in module._from_nodes: + if from_node.modname == '__future__': + for symbol, _ in from_node.names: + module.future_imports.add(symbol) self.add_from_names_to_locals(from_node) # handle delayed assattr nodes for delayed in module._delayed_assattr: diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/inference.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/inference.py index 29c97be5..35cce332 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/inference.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/inference.py @@ -72,6 +72,11 @@ def infer_argument(self, funcnode, name, context): return iter((boundnode,)) if funcnode.type == 'classmethod': return iter((boundnode,)) + # if we have a method, extract one position + # from the index, so we'll take in account + # 
the extra parameter represented by `self` or `cls` + if funcnode.type in ('method', 'classmethod'): + argindex -= 1 # 2. search arg index try: return self.args[argindex].infer(context) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py index f5e81321..058e845e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py @@ -97,7 +97,7 @@ def ast_from_file(self, filepath, modname=None, fallback=True, source=False): modname = '.'.join(modpath_from_file(filepath)) except ImportError: modname = filepath - if modname in self.astroid_cache: + if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath: return self.astroid_cache[modname] if source: from astroid.builder import AstroidBuilder @@ -281,11 +281,13 @@ def transform(self, node): """Call matching transforms for the given node if any and return the transformed node. """ - try: - transforms = self.transforms[type(node)] - except KeyError: - return node # no transform registered for this class of node - orig_node = node # copy the reference + cls = node.__class__ + if cls not in self.transforms: + # no transform registered for this class of node + return node + + transforms = self.transforms[cls] + orig_node = node # copy the reference for transform_func, predicate in transforms: if predicate is None or predicate(node): ret = transform_func(node) @@ -299,6 +301,9 @@ def transform(self, node): node = ret return node + def cache_module(self, module): + """Cache a module if no module with the same name is known yet.""" + self.astroid_cache.setdefault(module.name, module) class Project(object): diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/raw_building.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/raw_building.py index 720cdce0..bb685a9e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/raw_building.py +++ 
b/pymode/libs/pylama/lint/pylama_pylint/astroid/raw_building.py @@ -225,7 +225,8 @@ def inspect_build(self, module, modname=None, path=None): # in jython, java modules have no __doc__ (see #109562) node = build_module(modname) node.file = node.path = path and abspath(path) or path - MANAGER.astroid_cache[modname] = node + node.name = modname + MANAGER.cache_module(node) node.package = hasattr(module, '__path__') self._done = {} self.object_build(node, module) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py index d579913a..20bb664f 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py @@ -25,6 +25,10 @@ import sys from itertools import chain +try: + from io import BytesIO +except ImportError: + from cStringIO import StringIO as BytesIO from logilab.common.compat import builtins from logilab.common.decorators import cached @@ -217,6 +221,8 @@ class Module(LocalsDictNodeNG): # the file from which as been extracted the astroid representation. It may # be None if the representation has been built from a built-in module file = None + # Alternatively, if built from a string/bytes, this can be set + file_bytes = None # encoding of python source file, so we can get unicode out of it (python2 # only) file_encoding = None @@ -230,6 +236,9 @@ class Module(LocalsDictNodeNG): # as value globals = None + # Future imports + future_imports = None + # names of python special attributes (handled by getattr impl.) 
special_attributes = set(('__name__', '__doc__', '__file__', '__path__', '__dict__')) @@ -242,9 +251,12 @@ def __init__(self, name, doc, pure_python=True): self.pure_python = pure_python self.locals = self.globals = {} self.body = [] + self.future_imports = set() @property def file_stream(self): + if self.file_bytes is not None: + return BytesIO(self.file_bytes) if self.file is not None: return open(self.file, 'rb') return None @@ -284,6 +296,8 @@ def getattr(self, name, context=None, ignore_locals=False): return [self.import_module(name, relative_only=True)] except AstroidBuildingException: raise NotFoundError(name) + except SyntaxError: + raise NotFoundError(name) except Exception:# XXX pylint tests never pass here; do we need it? import traceback traceback.print_exc() @@ -656,6 +670,34 @@ def _rec_get_names(args, names=None): # Class ###################################################################### + +def _is_metaclass(klass): + """ Return if the given class can be + used as a metaclass. 
+ """ + if klass.name == 'type': + return True + for base in klass.bases: + try: + for baseobj in base.infer(): + if isinstance(baseobj, Instance): + # not abstract + return False + if baseobj is YES: + continue + if baseobj is klass: + continue + if not isinstance(baseobj, Class): + continue + if baseobj._type == 'metaclass': + return True + if _is_metaclass(baseobj): + return True + except InferenceError: + continue + return False + + def _class_type(klass, ancestors=None): """return a Class node type to differ metaclass, interface and exception from 'regular' classes @@ -663,7 +705,7 @@ def _class_type(klass, ancestors=None): # XXX we have to store ancestors in case we have a ancestor loop if klass._type is not None: return klass._type - if klass.name == 'type': + if _is_metaclass(klass): klass._type = 'metaclass' elif klass.name.endswith('Interface'): klass._type = 'interface' @@ -679,9 +721,14 @@ def _class_type(klass, ancestors=None): ancestors.add(klass) # print >> sys.stderr, '_class_type', repr(klass) for base in klass.ancestors(recurs=False): - if _class_type(base, ancestors) != 'class': - klass._type = base.type - break + name = _class_type(base, ancestors) + if name != 'class': + if name == 'metaclass' and not _is_metaclass(klass): + # don't propagate it if the current class + # can't be a metaclass + continue + klass._type = base.type + break if klass._type is None: klass._type = 'class' return klass._type @@ -801,8 +848,11 @@ def ancestors(self, recurs=True, context=None): try: for baseobj in stmt.infer(context): if not isinstance(baseobj, Class): - # duh ? - continue + if isinstance(baseobj, Instance): + baseobj = baseobj._proxied + else: + # duh ? 
+ continue if baseobj in yielded: continue # cf xxx above yielded.add(baseobj) @@ -993,13 +1043,21 @@ class node raise InferenceError() _metaclass = None - def metaclass(self): - """ Return the metaclass of this class """ + def _explicit_metaclass(self): + """ Return the explicit defined metaclass + for the current class. + + An explicit defined metaclass is defined + either by passing the ``metaclass`` keyword argument + in the class definition line (Python 3) or by + having a ``__metaclass__`` class attribute. + """ if self._metaclass: # Expects this from Py3k TreeRebuilder try: - return next(self._metaclass.infer()) - except InferenceError: + return next(node for node in self._metaclass.infer() + if node is not YES) + except (InferenceError, StopIteration): return try: @@ -1013,3 +1071,18 @@ def metaclass(self): if infered is YES: # don't expose this return None return infered + + def metaclass(self): + """ Return the metaclass of this class. + + If this class does not define explicitly a metaclass, + then the first defined metaclass in ancestors will be used + instead. + """ + klass = self._explicit_metaclass() + if klass is None: + for parent in self.ancestors(): + klass = parent.metaclass() + if klass is not None: + break + return klass diff --git a/pymode/libs/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py index 3699bbbb..60634022 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/main.py +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py @@ -19,7 +19,7 @@ class Linter(BaseLinter): """ Check code with pylint. """ @staticmethod - def run(path, code=None, **meta): # noqa + def run(path, **meta): # noqa """ Pylint code checking. :return list: List of errors. 
diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py index b48272d5..86488fa5 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py @@ -19,19 +19,19 @@ modname = distname = 'pylint' -numversion = (1, 1, 0) +numversion = (1, 2, 1) version = '.'.join([str(num) for num in numversion]) if sys.version_info < (2, 6): - install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.0.1', + install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.1', 'StringFormat'] else: - install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.0.1'] + install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.1'] license = 'GPL' description = "python code static checker" web = 'http://www.pylint.org' -mailinglist = "mailto://python-projects@lists.logilab.org" +mailinglist = "mailto://code-quality@python.org" author = 'Logilab' author_email = 'python-projects@lists.logilab.org' @@ -45,8 +45,8 @@ 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Debuggers', 'Topic :: Software Development :: Quality Assurance', - 'Topic :: Software Development :: Testing', - ] + 'Topic :: Software Development :: Testing' + ] long_desc = """\ diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py index 9346904e..af7965be 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py @@ -91,8 +91,6 @@ def __init__(self, linter=None): self.name = self.name.lower() OptionsProviderMixIn.__init__(self) self.linter = linter - # messages that are active for the current check - self.active_msgs = set() def add_message(self, msg_id, line=None, node=None, args=None): """add a message of a given type""" diff --git 
a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py index 11198acb..8136d0f3 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py @@ -58,6 +58,9 @@ BAD_FUNCTIONS.append('input') BAD_FUNCTIONS.append('file') +# Name categories that are always consistent with all naming conventions. +EXEMPT_NAME_CATEGORIES = set(('exempt', 'ignore')) + del re def in_loop(node): @@ -450,6 +453,12 @@ class BasicChecker(_BasicChecker): 'exec-used', 'Used when you use the "exec" statement (function for Python 3), to discourage its \ usage. That doesn\'t mean you can not use it !'), + 'W0123': ('Use of eval', + 'eval-used', + 'Used when you use the "eval" function, to discourage its ' + 'usage. Consider using `ast.literal_eval` for safely evaluating ' + 'strings containing Python expressions ' + 'from untrusted sources. '), 'W0141': ('Used builtin function %r', 'bad-builtin', 'Used when a black listed builtin function is used (see the ' @@ -680,7 +689,7 @@ def visit_exec(self, node): """just print a warning on exec statements""" self.add_message('exec-used', node=node) - @check_messages('bad-builtin', 'star-args', + @check_messages('bad-builtin', 'star-args', 'eval-used', 'exec-used', 'missing-reversed-argument', 'bad-reversed-sequence') def visit_callfunc(self, node): @@ -697,6 +706,8 @@ def visit_callfunc(self, node): self.add_message('exec-used', node=node) elif name == 'reversed': self._check_reversed(node) + elif name == 'eval': + self.add_message('eval-used', node=node) if name in self.config.bad_functions: self.add_message('bad-builtin', node=node, args=name) if node.starargs or node.kwargs: @@ -774,11 +785,14 @@ def _check_reversed(self, node): if argument is None: # nothing was infered # try to see if we have iter() - if (isinstance(node.args[0], astroid.CallFunc) and - node.args[0].func.name == 'iter'): - func = 
node.args[0].func.infer().next() - if is_builtin_object(func): - self.add_message('bad-reversed-sequence', node=node) + if isinstance(node.args[0], astroid.CallFunc): + try: + func = node.args[0].func.infer().next() + except InferenceError: + return + if (getattr(func, 'name', None) == 'iter' and + is_builtin_object(func)): + self.add_message('bad-reversed-sequence', node=node) return if isinstance(argument, astroid.Instance): @@ -811,79 +825,47 @@ def _check_reversed(self, node): # everything else is not a proper sequence for reversed() self.add_message('bad-reversed-sequence', node=node) +_NAME_TYPES = { + 'module': (MOD_NAME_RGX, 'module'), + 'const': (CONST_NAME_RGX, 'constant'), + 'class': (CLASS_NAME_RGX, 'class'), + 'function': (DEFAULT_NAME_RGX, 'function'), + 'method': (DEFAULT_NAME_RGX, 'method'), + 'attr': (DEFAULT_NAME_RGX, 'attribute'), + 'argument': (DEFAULT_NAME_RGX, 'argument'), + 'variable': (DEFAULT_NAME_RGX, 'variable'), + 'class_attribute': (CLASS_ATTRIBUTE_RGX, 'class attribute'), + 'inlinevar': (COMP_VAR_RGX, 'inline iteration'), +} + +def _create_naming_options(): + name_options = [] + for name_type, (rgx, human_readable_name) in _NAME_TYPES.iteritems(): + name_type = name_type.replace('_', '-') + name_options.append(( + '%s-rgx' % (name_type,), + {'default': rgx, 'type': 'regexp', 'metavar': '', + 'help': 'Regular expression matching correct %s names' % (human_readable_name,)})) + name_options.append(( + '%s-name-hint' % (name_type,), + {'default': rgx.pattern, 'type': 'string', 'metavar': '', + 'help': 'Naming hint for %s names' % (human_readable_name,)})) + + return tuple(name_options) + class NameChecker(_BasicChecker): msgs = { 'C0102': ('Black listed name "%s"', 'blacklisted-name', 'Used when the name is listed in the black list (unauthorized \ names).'), - 'C0103': ('Invalid %s name "%s"', + 'C0103': ('Invalid %s name "%s"%s', 'invalid-name', 'Used when the name doesn\'t match the regular expression \ associated to its type (constant, 
variable, class...).'), } - options = (('module-rgx', - {'default' : MOD_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'module names'} - ), - ('const-rgx', - {'default' : CONST_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'module level names'} - ), - ('class-rgx', - {'default' : CLASS_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'class names'} - ), - ('function-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'function names'} - ), - ('method-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'method names'} - ), - ('attr-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'instance attribute names'} - ), - ('argument-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'argument names'}), - ('variable-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'variable names'} - ), - ('class-attribute-rgx', - {'default' : CLASS_ATTRIBUTE_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'attribute names in class bodies'} - ), - ('inlinevar-rgx', - {'default' : COMP_VAR_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'list comprehension / generator expression variable \ - names'} - ), - # XXX use set + options = (# XXX use set ('good-names', {'default' : ('i', 'j', 'k', 'ex', 'Run', '_'), 'type' :'csv', 'metavar' 
: '', @@ -896,7 +878,24 @@ class NameChecker(_BasicChecker): 'help' : 'Bad variable names which should always be refused, ' 'separated by a comma'} ), - ) + ('name-group', + {'default' : (), + 'type' :'csv', 'metavar' : '', + 'help' : ('Colon-delimited sets of names that determine each' + ' other\'s naming style when the name regexes' + ' allow several styles.')} + ), + ('include-naming-hint', + {'default': False, 'type' : 'yn', 'metavar' : '', + 'help': 'Include a hint for the correct naming format with invalid-name'} + ), + ) + _create_naming_options() + + + def __init__(self, linter): + _BasicChecker.__init__(self, linter) + self._name_category = {} + self._name_group = {} def open(self): self.stats = self.linter.add_stats(badname_module=0, @@ -907,6 +906,9 @@ def open(self): badname_inlinevar=0, badname_argument=0, badname_class_attribute=0) + for group in self.config.name_group: + for name_type in group.split(':'): + self._name_group[name_type] = 'group_%s' % (group,) @check_messages('blacklisted-name', 'invalid-name') def visit_module(self, node): @@ -968,6 +970,14 @@ def _recursive_check_names(self, args, node): else: self._recursive_check_names(arg.elts, node) + def _find_name_group(self, node_type): + return self._name_group.get(node_type, node_type) + + def _is_multi_naming_match(self, match): + return (match is not None and + match.lastgroup is not None and + match.lastgroup not in EXEMPT_NAME_CATEGORIES) + def _check_name(self, node_type, name, node): """check for a name using the type's regexp""" if is_inside_except(node): @@ -981,13 +991,21 @@ def _check_name(self, node_type, name, node): self.add_message('blacklisted-name', node=node, args=name) return regexp = getattr(self.config, node_type + '_rgx') - if regexp.match(name) is None: - type_label = {'inlinedvar': 'inlined variable', - 'const': 'constant', - 'attr': 'attribute', - 'class_attribute': 'class attribute' - }.get(node_type, node_type) - self.add_message('invalid-name', node=node, 
args=(type_label, name)) + match = regexp.match(name) + + if self._is_multi_naming_match(match): + name_group = self._find_name_group(node_type) + if name_group not in self._name_category: + self._name_category[name_group] = match.lastgroup + elif self._name_category[name_group] != match.lastgroup: + match = None + + if match is None: + type_label = _NAME_TYPES[node_type][1] + hint = '' + if self.config.include_naming_hint: + hint = ' (hint: %s)' % (getattr(self.config, node_type + '_name_hint')) + self.add_message('invalid-name', node=node, args=(type_label, name, hint)) self.stats['badname_' + node_type] += 1 @@ -1044,15 +1062,17 @@ def visit_function(self, node): isinstance(ancestor[node.name], astroid.Function): overridden = True break - if not overridden: - self._check_docstring(ftype, node) + self._check_docstring(ftype, node, + report_missing=not overridden) else: self._check_docstring(ftype, node) - def _check_docstring(self, node_type, node): + def _check_docstring(self, node_type, node, report_missing=True): """check the node has a non empty docstring""" docstring = node.doc if docstring is None: + if not report_missing: + return if node.body: lines = node.body[-1].lineno - node.body[0].lineno + 1 else: diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py index 3f0a22d3..f5e2783f 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py @@ -1,4 +1,4 @@ -# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE). 
# http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -16,7 +16,9 @@ """classes checker for Python code """ from __future__ import generators + import sys + import astroid from astroid import YES, Instance, are_exclusive, AssAttr from astroid.bases import Generator @@ -30,6 +32,7 @@ NEXT_METHOD = '__next__' else: NEXT_METHOD = 'next' +ITER_METHODS = ('__iter__', '__getitem__') def class_is_abstract(node): """return true if the given class node should be considered as an abstract @@ -49,7 +52,7 @@ def class_is_abstract(node): compatibility for an unexpected reason. Please report this kind \ if you don\'t make sense of it.'), - 'E0202': ('An attribute affected in %s line %s hide this method', + 'E0202': ('An attribute defined in %s line %s hides this method', 'method-hidden', 'Used when a class defines a method which is hidden by an ' 'instance attribute from an ancestor class or set by some ' @@ -156,7 +159,15 @@ class implementing this interface'), 'bad-context-manager', 'Used when the __exit__ special method, belonging to a \ context manager, does not accept 3 arguments \ - (type, value, traceback).') + (type, value, traceback).'), + 'E0236': ('Invalid object %r in __slots__, must contain ' + 'only non empty strings', + 'invalid-slots-object', + 'Used when an invalid (non-string) object occurs in __slots__.'), + 'E0238': ('Invalid __slots__ object', + 'invalid-slots', + 'Used when an invalid __slots__ is found in class. 
' + 'Only a string, an iterable or a sequence is permitted.') } @@ -239,9 +250,10 @@ def visit_class(self, node): try: node.local_attr('__init__') except astroid.NotFoundError: - self.add_message('W0232', args=node, node=node) + self.add_message('no-init', args=node, node=node) + self._check_slots(node) - @check_messages('E0203', 'W0201') + @check_messages('access-member-before-definition', 'attribute-defined-outside-init') def leave_class(self, cnode): """close a class node: check that instance attributes are defined in __init__ and check @@ -252,7 +264,7 @@ def leave_class(self, cnode): if cnode.type != 'metaclass': self._check_accessed_members(cnode, accessed) # checks attributes are defined in an allowed method such as __init__ - if 'W0201' not in self.active_msgs: + if not self.linter.is_message_enabled('attribute-defined-outside-init'): return defining_methods = self.config.defining_attr_methods for attr, nodes in cnode.instance_attrs.iteritems(): @@ -281,7 +293,7 @@ def leave_class(self, cnode): try: cnode.local_attr(attr) except astroid.NotFoundError: - self.add_message('W0201', args=attr, node=node) + self.add_message('attribute-defined-outside-init', args=attr, node=node) def visit_function(self, node): """check method arguments, overriding""" @@ -323,7 +335,7 @@ def visit_function(self, node): try: overridden = klass.instance_attr(node.name)[0] # XXX args = (overridden.root().name, overridden.fromlineno) - self.add_message('E0202', args=args, node=node) + self.add_message('method-hidden', args=args, node=node) except astroid.NotFoundError: pass @@ -333,6 +345,56 @@ def visit_function(self, node): elif node.name == '__exit__': self._check_exit(node) + def _check_slots(self, node): + if '__slots__' not in node.locals: + return + for slots in node.igetattr('__slots__'): + # check if __slots__ is a valid type + for meth in ITER_METHODS: + try: + slots.getattr(meth) + break + except astroid.NotFoundError: + continue + else: + self.add_message('invalid-slots', 
node=node) + continue + + if isinstance(slots, astroid.Const): + # a string, ignore the following checks + continue + if not hasattr(slots, 'itered'): + # we can't obtain the values, maybe a .deque? + continue + + if isinstance(slots, astroid.Dict): + values = [item[0] for item in slots.items] + else: + values = slots.itered() + if values is YES: + return + + for elt in values: + try: + self._check_slots_elt(elt) + except astroid.InferenceError: + continue + + def _check_slots_elt(self, elt): + for infered in elt.infer(): + if infered is YES: + continue + if (not isinstance(infered, astroid.Const) or + not isinstance(infered.value, str)): + self.add_message('invalid-slots-object', + args=infered.as_string(), + node=elt) + continue + if not infered.value: + self.add_message('invalid-slots-object', + args=infered.as_string(), + node=elt) + def _check_iter(self, node): try: infered = node.infer_call_result(node) @@ -370,7 +432,7 @@ def leave_function(self, node): if node.is_method(): if node.args.args is not None: self._first_attrs.pop() - if 'R0201' not in self.active_msgs: + if not self.linter.is_message_enabled('no-self-use'): return class_node = node.parent.frame() if (self._meth_could_be_func and node.type == 'method' @@ -378,7 +440,7 @@ def leave_function(self, node): and not (node.is_abstract() or overrides_a_method(class_node, node.name)) and class_node.type != 'interface'): - self.add_message('R0201', node=node) + self.add_message('no-self-use', node=node) def visit_getattr(self, node): """check if the getattr is an access to a class member @@ -391,15 +453,17 @@ class member from outside its class (but ignore __special__ if self.is_first_attr(node): self._accessed[-1].setdefault(attrname, []).append(node) return - if 'W0212' not in self.active_msgs: + if not self.linter.is_message_enabled('protected-access'): return self._check_protected_attribute_access(node) - def visit_assign(self, assign_node): - if 'W0212' not in self.active_msgs: - return + def 
visit_assattr(self, node): + if isinstance(node.ass_type(), astroid.AugAssign) and self.is_first_attr(node): + self._accessed[-1].setdefault(node.attrname, []).append(node) + @check_messages('protected-access') + def visit_assign(self, assign_node): node = assign_node.targets[0] if not isinstance(node, AssAttr): return @@ -432,7 +496,7 @@ def _check_protected_attribute_access(self, node): # We are not in a class, no remaining valid case if klass is None: - self.add_message('W0212', node=node, args=attrname) + self.add_message('protected-access', node=node, args=attrname) return # If the expression begins with a call to super, that's ok. @@ -444,7 +508,7 @@ def _check_protected_attribute_access(self, node): # We are in a class, one remaining valid cases, Klass._attr inside # Klass if not (callee == klass.name or callee in klass.basenames): - self.add_message('W0212', node=node, args=attrname) + self.add_message('protected-access', node=node, args=attrname) def visit_name(self, node): """check if the name handle an access to a class member @@ -460,8 +524,8 @@ def _check_accessed_members(self, node, accessed): for attr, nodes in accessed.iteritems(): # deactivate "except doesn't do anything", that's expected # pylint: disable=W0704 - # is it a class attribute ? try: + # is it a class attribute ? 
node.local_attr(attr) # yes, stop here continue @@ -480,6 +544,19 @@ def _check_accessed_members(self, node, accessed): except astroid.NotFoundError: pass else: + # filter out augment assignment nodes + defstmts = [stmt for stmt in defstmts if stmt not in nodes] + if not defstmts: + # only augment assignment for this node, no-member should be + # triggered by the typecheck checker + continue + # filter defstmts to only pick the first one when there are + # several assignments in the same scope + scope = defstmts[0].scope() + defstmts = [stmt for i, stmt in enumerate(defstmts) + if i == 0 or stmt.scope() is not scope] + # if there are still more than one, don't attempt to be smarter + # than we can be if len(defstmts) == 1: defstmt = defstmts[0] # check that if the node is accessed in the same method as @@ -489,8 +566,8 @@ def _check_accessed_members(self, node, accessed): for _node in nodes: if _node.frame() is frame and _node.fromlineno < lno \ and not are_exclusive(_node.statement(), defstmt, ('AttributeError', 'Exception', 'BaseException')): - self.add_message('E0203', node=_node, - args=(attr, lno)) + self.add_message('access-member-before-definition', + node=_node, args=(attr, lno)) def _check_first_arg_for_type(self, node, metaclass=0): """check the name of first argument, expect: @@ -513,34 +590,34 @@ def _check_first_arg_for_type(self, node, metaclass=0): if (first_arg == 'self' or first_arg in self.config.valid_classmethod_first_arg or first_arg in self.config.valid_metaclass_classmethod_first_arg): - self.add_message('W0211', args=first, node=node) + self.add_message('bad-staticmethod-argument', args=first, node=node) return self._first_attrs[-1] = None # class / regular method with no args elif not node.args.args: - self.add_message('E0211', node=node) + self.add_message('no-method-argument', node=node) # metaclass elif metaclass: # metaclass __new__ or classmethod if node.type == 'classmethod': self._check_first_arg_config(first, 
self.config.valid_metaclass_classmethod_first_arg, node, - 'C0204', node.name) + 'bad-mcs-classmethod-argument', node.name) # metaclass regular method else: self._check_first_arg_config(first, - self.config.valid_classmethod_first_arg, node, 'C0203', + self.config.valid_classmethod_first_arg, node, 'bad-mcs-method-argument', node.name) # regular class else: # class method if node.type == 'classmethod': self._check_first_arg_config(first, - self.config.valid_classmethod_first_arg, node, 'C0202', + self.config.valid_classmethod_first_arg, node, 'bad-classmethod-argument', node.name) # regular method without self as argument elif first != 'self': - self.add_message('E0213', node=node) + self.add_message('no-self-argument', node=node) def _check_first_arg_config(self, first, config, node, message, method_name): @@ -568,8 +645,11 @@ def _check_bases_classes(self, node): continue # owner is not this class, it must be a parent class # check that the ancestor's method is not abstract + if method.name in node.locals: + # it is redefined as an attribute or with a descriptor + continue if method.is_abstract(pass_is_abstract=False): - self.add_message('W0223', node=node, + self.add_message('abstract-method', node=node, args=(method.name, owner.name)) def _check_interfaces(self, node): @@ -581,7 +661,7 @@ def iface_handler(obj): """filter interface objects, it should be classes""" if not isinstance(obj, astroid.Class): e0221_hack[0] = True - self.add_message('E0221', node=node, + self.add_message('interface-is-not-class', node=node, args=(obj.as_string(),)) return False return True @@ -598,7 +678,7 @@ def iface_handler(obj): try: method = node_method(node, name) except astroid.NotFoundError: - self.add_message('E0222', args=(name, iface.name), + self.add_message('missing-interface-method', args=(name, iface.name), node=node) continue # ignore inherited methods @@ -617,14 +697,15 @@ def iface_handler(obj): # Use as_string() for the message # FIXME: in case of multiple 
interfaces, find which one could not # be resolved - self.add_message('F0220', node=implements, + self.add_message('unresolved-interface', node=implements, args=(node.name, assignment.value.as_string())) def _check_init(self, node): """check that the __init__ method call super or ancestors'__init__ method """ - if not set(('W0231', 'W0233')) & self.active_msgs: + if (not self.linter.is_message_enabled('super-init-not-called') and + not self.linter.is_message_enabled('non-parent-init-called')): return klass_node = node.parent.frame() to_call = _ancestors_to_call(klass_node) @@ -647,13 +728,13 @@ def _check_init(self, node): del not_called_yet[klass] except KeyError: if klass not in to_call: - self.add_message('W0233', node=expr, args=klass.name) + self.add_message('non-parent-init-called', node=expr, args=klass.name) except astroid.InferenceError: continue for klass, method in not_called_yet.iteritems(): if klass.name == 'object' or method.parent.name == 'object': continue - self.add_message('W0231', args=klass.name, node=node) + self.add_message('super-init-not-called', args=klass.name, node=node) def _check_signature(self, method1, refmethod, class_type): """check that the signature of the two given methods match @@ -662,7 +743,7 @@ def _check_signature(self, method1, refmethod, class_type): """ if not (isinstance(method1, astroid.Function) and isinstance(refmethod, astroid.Function)): - self.add_message('F0202', args=(method1, refmethod), node=method1) + self.add_message('method-check-failed', args=(method1, refmethod), node=method1) return # don't care about functions with unknown argument (builtins) if method1.args.args is None or refmethod.args.args is None: @@ -673,9 +754,9 @@ def _check_signature(self, method1, refmethod, class_type): if is_attr_private(method1.name): return if len(method1.args.args) != len(refmethod.args.args): - self.add_message('W0221', args=class_type, node=method1) + self.add_message('arguments-differ', args=class_type, node=method1) 
elif len(method1.args.defaults) < len(refmethod.args.defaults): - self.add_message('W0222', args=class_type, node=method1) + self.add_message('signature-differs', args=class_type, node=method1) def is_first_attr(self, node): """Check that attribute lookup name use first attribute variable name diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py index cfd2d808..c9ef4dfa 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py @@ -185,15 +185,18 @@ def close(self): """check that abstract/interface classes are used""" for abstract in self._abstracts: if not abstract in self._used_abstracts: - self.add_message('R0921', node=abstract) + self.add_message('abstract-class-not-used', node=abstract) elif self._used_abstracts[abstract] < 2: - self.add_message('R0922', node=abstract, + self.add_message('abstract-class-little-used', node=abstract, args=self._used_abstracts[abstract]) for iface in self._ifaces: if not iface in self._used_ifaces: - self.add_message('R0923', node=iface) + self.add_message('interface-not-implemented', node=iface) - @check_messages('R0901', 'R0902', 'R0903', 'R0904', 'R0921', 'R0922', 'R0923') + @check_messages('too-many-ancestors', 'too-many-instance-attributes', + 'too-few-public-methods', 'too-many-public-methods', + 'abstract-class-not-used', 'abstract-class-little-used', + 'interface-not-implemented') def visit_class(self, node): """check size of inheritance hierarchy and number of instance attributes """ @@ -201,13 +204,13 @@ def visit_class(self, node): # Is the total inheritance hierarchy is 7 or less? 
nb_parents = len(list(node.ancestors())) if nb_parents > self.config.max_parents: - self.add_message('R0901', node=node, + self.add_message('too-many-ancestors', node=node, args=(nb_parents, self.config.max_parents)) # Does the class contain less than 20 attributes for # non-GUI classes (40 for GUI)? # FIXME detect gui classes if len(node.instance_attrs) > self.config.max_attributes: - self.add_message('R0902', node=node, + self.add_message('too-many-instance-attributes', node=node, args=(len(node.instance_attrs), self.config.max_attributes)) # update abstract / interface classes structures @@ -231,7 +234,10 @@ def visit_class(self, node): except KeyError: self._used_abstracts[parent] = 1 - @check_messages('R0901', 'R0902', 'R0903', 'R0904', 'R0921', 'R0922', 'R0923') + @check_messages('too-many-ancestors', 'too-many-instance-attributes', + 'too-few-public-methods', 'too-many-public-methods', + 'abstract-class-not-used', 'abstract-class-little-used', + 'interface-not-implemented') def leave_class(self, node): """check number of public methods""" nb_public_methods = 0 @@ -243,7 +249,7 @@ def leave_class(self, node): special_methods.add(method.name) # Does the class contain less than 20 public methods ? 
if nb_public_methods > self.config.max_public_methods: - self.add_message('R0904', node=node, + self.add_message('too-many-public-methods', node=node, args=(nb_public_methods, self.config.max_public_methods)) # stop here for exception, metaclass and interface classes @@ -255,7 +261,8 @@ def leave_class(self, node): args=(nb_public_methods, self.config.min_public_methods)) - @check_messages('R0911', 'R0912', 'R0913', 'R0914', 'R0915') + @check_messages('too-many-return-statements', 'too-many-branches', + 'too-many-arguments', 'too-many-locals', 'too-many-statements') def visit_function(self, node): """check function name, docstring, arguments, redefinition, variable names, max locals @@ -272,34 +279,34 @@ def visit_function(self, node): if self.config.ignored_argument_names.match(arg.name)]) argnum = len(args) - ignored_args_num if argnum > self.config.max_args: - self.add_message('R0913', node=node, + self.add_message('too-many-arguments', node=node, args=(len(args), self.config.max_args)) else: ignored_args_num = 0 # check number of local variables locnum = len(node.locals) - ignored_args_num if locnum > self.config.max_locals: - self.add_message('R0914', node=node, + self.add_message('too-many-locals', node=node, args=(locnum, self.config.max_locals)) # init statements counter self._stmts = 1 - @check_messages('R0911', 'R0912', 'R0913', 'R0914', 'R0915') + @check_messages('too-many-return-statements', 'too-many-branches', 'too-many-arguments', 'too-many-locals', 'too-many-statements') def leave_function(self, node): """most of the work is done here on close: checks for max returns, branch, return in __init__ """ returns = self._returns.pop() if returns > self.config.max_returns: - self.add_message('R0911', node=node, + self.add_message('too-many-return-statements', node=node, args=(returns, self.config.max_returns)) branches = self._branches.pop() if branches > self.config.max_branches: - self.add_message('R0912', node=node, + 
self.add_message('too-many-branches', node=node, args=(branches, self.config.max_branches)) # check number of statements if self._stmts > self.config.max_statements: - self.add_message('R0915', node=node, + self.add_message('too-many-statements', node=node, args=(self._stmts, self.config.max_statements)) def visit_return(self, _): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py index f85deb73..84f92eaf 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py @@ -94,7 +94,8 @@ def infer_bases(klass): 'W0710': ('Exception doesn\'t inherit from standard "Exception" class', 'nonstandard-exception', 'Used when a custom exception class is raised but doesn\'t \ - inherit from the builtin "Exception" class.'), + inherit from the builtin "Exception" class.', + {'maxversion': (3, 0)}), 'W0711': ('Exception to catch is the result of a binary "%s" operation', 'binary-op-exception', 'Used when the exception to catch is of the form \ @@ -106,6 +107,11 @@ def infer_bases(klass): 'clauses. ' 'See http://www.python.org/dev/peps/pep-3110/', {'maxversion': (3, 0)}), + 'W0713': ('Indexing exceptions will not work on Python 3', + 'indexing-exception', + 'Indexing exceptions will not work on Python 3. 
Use ' + '`exception.args[index]` instead.', + {'maxversion': (3, 0)}), } @@ -134,8 +140,8 @@ class ExceptionsChecker(BaseChecker): ), ) - @check_messages('W0701', 'W0710', 'E0702', 'E0710', 'E0711', - 'bad-exception-context') + @check_messages('raising-string', 'nonstandard-exception', 'raising-bad-type', + 'raising-non-exception', 'notimplemented-raised', 'bad-exception-context') def visit_raise(self, node): """visit raise possibly inferring value""" # ignore empty raise @@ -172,22 +178,22 @@ def _check_raise_value(self, node, expr): if isinstance(expr, astroid.Const): value = expr.value if isinstance(value, str): - self.add_message('W0701', node=node) + self.add_message('raising-string', node=node) else: - self.add_message('E0702', node=node, + self.add_message('raising-bad-type', node=node, args=value.__class__.__name__) elif (isinstance(expr, astroid.Name) and \ expr.name in ('None', 'True', 'False')) or \ isinstance(expr, (astroid.List, astroid.Dict, astroid.Tuple, astroid.Module, astroid.Function)): - self.add_message('E0702', node=node, args=expr.name) + self.add_message('raising-bad-type', node=node, args=expr.name) elif ((isinstance(expr, astroid.Name) and expr.name == 'NotImplemented') or (isinstance(expr, astroid.CallFunc) and isinstance(expr.func, astroid.Name) and expr.func.name == 'NotImplemented')): - self.add_message('E0711', node=node) + self.add_message('notimplemented-raised', node=node) elif isinstance(expr, astroid.BinOp) and expr.op == '%': - self.add_message('W0701', node=node) + self.add_message('raising-string', node=node) elif isinstance(expr, (Instance, astroid.Class)): if isinstance(expr, Instance): expr = expr._proxied @@ -195,23 +201,36 @@ def _check_raise_value(self, node, expr): not inherit_from_std_ex(expr) and expr.root().name != BUILTINS_NAME): if expr.newstyle: - self.add_message('E0710', node=node) + self.add_message('raising-non-exception', node=node) else: - self.add_message('W0710', node=node) + 
self.add_message('nonstandard-exception', node=node) else: value_found = False else: value_found = False return value_found - @check_messages('W0712') + @check_messages('unpacking-in-except') def visit_excepthandler(self, node): """Visit an except handler block and check for exception unpacking.""" if isinstance(node.name, (astroid.Tuple, astroid.List)): - self.add_message('W0712', node=node) + self.add_message('unpacking-in-except', node=node) + @check_messages('indexing-exception') + def visit_subscript(self, node): + """ Look for indexing exceptions. """ + try: + for infered in node.value.infer(): + if not isinstance(infered, astroid.Instance): + continue + if inherit_from_std_ex(infered): + self.add_message('indexing-exception', node=node) + except astroid.InferenceError: + return - @check_messages('W0702', 'W0703', 'W0704', 'W0711', 'E0701', 'catching-non-exception') + @check_messages('bare-except', 'broad-except', 'pointless-except', + 'binary-op-exception', 'bad-except-order', + 'catching-non-exception') def visit_tryexcept(self, node): """check for empty except""" exceptions_classes = [] @@ -219,18 +238,18 @@ def visit_tryexcept(self, node): for index, handler in enumerate(node.handlers): # single except doing nothing but "pass" without else clause if nb_handlers == 1 and is_empty(handler.body) and not node.orelse: - self.add_message('W0704', node=handler.type or handler.body[0]) + self.add_message('pointless-except', node=handler.type or handler.body[0]) if handler.type is None: if nb_handlers == 1 and not is_raising(handler.body): - self.add_message('W0702', node=handler) + self.add_message('bare-except', node=handler) # check if a "except:" is followed by some other # except elif index < (nb_handlers - 1): msg = 'empty except clause should always appear last' - self.add_message('E0701', node=node, args=msg) + self.add_message('bad-except-order', node=node, args=msg) elif isinstance(handler.type, astroid.BoolOp): - self.add_message('W0711', node=handler, 
args=handler.type.op) + self.add_message('binary-op-exception', node=handler, args=handler.type.op) else: try: excs = list(unpack_infer(handler.type)) @@ -246,11 +265,11 @@ def visit_tryexcept(self, node): if previous_exc in exc_ancestors: msg = '%s is an ancestor class of %s' % ( previous_exc.name, exc.name) - self.add_message('E0701', node=handler.type, args=msg) + self.add_message('bad-except-order', node=handler.type, args=msg) if (exc.name in self.config.overgeneral_exceptions and exc.root().name == EXCEPTIONS_MODULE and nb_handlers == 1 and not is_raising(handler.body)): - self.add_message('W0703', args=exc.name, node=handler.type) + self.add_message('broad-except', args=exc.name, node=handler.type) if (not inherit_from_std_ex(exc) and exc.root().name != BUILTINS_NAME): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py index e498af75..8b73049c 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py @@ -35,6 +35,7 @@ from pylint.checkers.utils import check_messages from pylint.utils import WarningScope, OPTION_RGX +_CONTINUATION_BLOCK_OPENERS = ['elif', 'except', 'for', 'if', 'while', 'def', 'class'] _KEYWORD_TOKENS = ['assert', 'del', 'elif', 'except', 'for', 'if', 'in', 'not', 'raise', 'return', 'while', 'yield'] if sys.version_info < (3, 0): @@ -45,8 +46,10 @@ '%=', '>>=', '<<='] _OPENING_BRACKETS = ['(', '[', '{'] _CLOSING_BRACKETS = [')', ']', '}'] +_TAB_LENGTH = 8 _EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT]) +_JUNK_TOKENS = (tokenize.COMMENT, tokenize.NL) # Whitespace checking policy constants _MUST = 0 @@ -77,6 +80,9 @@ 'bad-indentation', 'Used when an unexpected number of indentation\'s tabulations or ' 'spaces has been found.'), + 'C0330': ('Wrong %s indentation%s.\n%s%s', + 'bad-continuation', + 'TODO'), 'W0312': ('Found indentation with %ss 
instead of %ss', 'mixed-indentation', 'Used when there are some mixed tabs and spaces in a module.'), @@ -138,6 +144,261 @@ def _column_distance(token1, token2): return token2[2][1] - token1[3][1] +def _last_token_on_line_is(tokens, line_end, token): + return ( + line_end > 0 and tokens.token(line_end-1) == token or + line_end > 1 and tokens.token(line_end-2) == token + and tokens.type(line_end-1) == tokenize.COMMENT) + + +def _token_followed_by_eol(tokens, position): + return (tokens.type(position+1) == tokenize.NL or + tokens.type(position+1) == tokenize.COMMENT and + tokens.type(position+2) == tokenize.NL) + + +def _get_indent_length(line): + """Return the length of the indentation on the given token's line.""" + result = 0 + for char in line: + if char == ' ': + result += 1 + elif char == '\t': + result += _TAB_LENGTH + else: + break + return result + + +def _get_indent_hint_line(bar_positions, bad_position): + """Return a line with |s for each of the positions in the given lists.""" + if not bar_positions: + return '' + markers = [(pos, '|') for pos in bar_positions] + markers.append((bad_position, '^')) + markers.sort() + line = [' '] * (markers[-1][0] + 1) + for position, marker in markers: + line[position] = marker + return ''.join(line) + + +class _ContinuedIndent(object): + __slots__ = ('valid_outdent_offsets', + 'valid_continuation_offsets', + 'context_type', + 'token', + 'position') + + def __init__(self, + context_type, + token, + position, + valid_outdent_offsets, + valid_continuation_offsets): + self.valid_outdent_offsets = valid_outdent_offsets + self.valid_continuation_offsets = valid_continuation_offsets + self.context_type = context_type + self.position = position + self.token = token + + +# The contexts for hanging indents. +# A hanging indented dictionary value after : +HANGING_DICT_VALUE = 'dict-value' +# Hanging indentation in an expression. +HANGING = 'hanging' +# Hanging indentation in a block header. 
+HANGING_BLOCK = 'hanging-block' +# Continued indentation inside an expression. +CONTINUED = 'continued' +# Continued indentation in a block header. +CONTINUED_BLOCK = 'continued-block' + +SINGLE_LINE = 'single' +WITH_BODY = 'multi' + +_CONTINUATION_MSG_PARTS = { + HANGING_DICT_VALUE: ('hanging', ' in dict value'), + HANGING: ('hanging', ''), + HANGING_BLOCK: ('hanging', ' before block'), + CONTINUED: ('continued', ''), + CONTINUED_BLOCK: ('continued', ' before block'), +} + + +def _Offsets(*args): + """Valid indentation offsets for a continued line.""" + return dict((a, None) for a in args) + + +def _BeforeBlockOffsets(single, with_body): + """Valid alternative indent offsets for continued lines before blocks. + + :param single: Valid offset for statements on a single logical line. + :param with_body: Valid offset for statements on several lines. + """ + return {single: SINGLE_LINE, with_body: WITH_BODY} + + +class TokenWrapper(object): + """A wrapper for readable access to token information.""" + + def __init__(self, tokens): + self._tokens = tokens + + def token(self, idx): + return self._tokens[idx][1] + + def type(self, idx): + return self._tokens[idx][0] + + def start_line(self, idx): + return self._tokens[idx][2][0] + + def start_col(self, idx): + return self._tokens[idx][2][1] + + def line(self, idx): + return self._tokens[idx][4] + + +class ContinuedLineState(object): + """Tracker for continued indentation inside a logical line.""" + + def __init__(self, tokens, config): + self._line_start = -1 + self._cont_stack = [] + self._is_block_opener = False + self.retained_warnings = [] + self._config = config + self._tokens = TokenWrapper(tokens) + + @property + def has_content(self): + return bool(self._cont_stack) + + @property + def _block_indent_size(self): + return len(self._config.indent_string.replace('\t', ' ' * _TAB_LENGTH)) + + @property + def _continuation_size(self): + return self._config.indent_after_paren + + def handle_line_start(self, pos): + 
"""Record the first non-junk token at the start of a line.""" + if self._line_start > -1: + return + self._is_block_opener = self._tokens.token(pos) in _CONTINUATION_BLOCK_OPENERS + self._line_start = pos + + def next_physical_line(self): + """Prepares the tracker for a new physical line (NL).""" + self._line_start = -1 + self._is_block_opener = False + + def next_logical_line(self): + """Prepares the tracker for a new logical line (NEWLINE). + + A new logical line only starts with block indentation. + """ + self.next_physical_line() + self.retained_warnings = [] + self._cont_stack = [] + + def add_block_warning(self, token_position, state, valid_offsets): + self.retained_warnings.append((token_position, state, valid_offsets)) + + def get_valid_offsets(self, idx): + """"Returns the valid offsets for the token at the given position.""" + # The closing brace on a dict or the 'for' in a dict comprehension may + # reset two indent levels because the dict value is ended implicitly + stack_top = -1 + if self._tokens.token(idx) in ('}', 'for') and self._cont_stack[-1].token == ':': + stack_top = -2 + indent = self._cont_stack[stack_top] + if self._tokens.token(idx) in _CLOSING_BRACKETS: + valid_offsets = indent.valid_outdent_offsets + else: + valid_offsets = indent.valid_continuation_offsets + return indent, valid_offsets.copy() + + def _hanging_indent_after_bracket(self, bracket, position): + """Extracts indentation information for a hanging indent.""" + indentation = _get_indent_length(self._tokens.line(position)) + if self._is_block_opener and self._continuation_size == self._block_indent_size: + return _ContinuedIndent( + HANGING_BLOCK, + bracket, + position, + _Offsets(indentation + self._continuation_size, indentation), + _BeforeBlockOffsets(indentation + self._continuation_size, + indentation + self._continuation_size * 2)) + elif bracket == ':': + if self._cont_stack[-1].context_type == CONTINUED: + # If the dict key was on the same line as the open brace, the new 
+ # correct indent should be relative to the key instead of the + # current indent level + paren_align = self._cont_stack[-1].valid_outdent_offsets + next_align = self._cont_stack[-1].valid_continuation_offsets.copy() + next_align[next_align.keys()[0] + self._continuation_size] = True + else: + next_align = _Offsets(indentation + self._continuation_size, indentation) + paren_align = _Offsets(indentation + self._continuation_size, indentation) + return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align) + else: + return _ContinuedIndent( + HANGING, + bracket, + position, + _Offsets(indentation, indentation + self._continuation_size), + _Offsets(indentation + self._continuation_size)) + + def _continuation_inside_bracket(self, bracket, pos): + """Extracts indentation information for a continued indent.""" + indentation = _get_indent_length(self._tokens.line(pos)) + if self._is_block_opener and self._tokens.start_col(pos+1) - indentation == self._block_indent_size: + return _ContinuedIndent( + CONTINUED_BLOCK, + bracket, + pos, + _Offsets(self._tokens.start_col(pos)), + _BeforeBlockOffsets(self._tokens.start_col(pos+1), + self._tokens.start_col(pos+1) + self._continuation_size)) + else: + return _ContinuedIndent( + CONTINUED, + bracket, + pos, + _Offsets(self._tokens.start_col(pos)), + _Offsets(self._tokens.start_col(pos+1))) + + def pop_token(self): + self._cont_stack.pop() + + def push_token(self, token, position): + """Pushes a new token for continued indentation on the stack. + + Tokens that can modify continued indentation offsets are: + * opening brackets + * 'lambda' + * : inside dictionaries + + push_token relies on the caller to filter out those + interesting tokens. + + :param token: The concrete token + :param position: The position of the token in the stream. 
+ """ + if _token_followed_by_eol(self._tokens, position): + self._cont_stack.append( + self._hanging_indent_after_bracket(token, position)) + else: + self._cont_stack.append( + self._continuation_inside_bracket(token, position)) + + class FormatChecker(BaseTokenChecker): """checks for : * unauthorized constructions @@ -180,24 +441,41 @@ class FormatChecker(BaseTokenChecker): {'default' : ' ', 'type' : "string", 'metavar' : '', 'help' : 'String used as indentation unit. This is usually \ " " (4 spaces) or "\\t" (1 tab).'}), + ('indent-after-paren', + {'type': 'int', 'metavar': '', 'default': 4, + 'help': 'Number of spaces of indent required inside a hanging ' + ' or continued line.'}), ) + def __init__(self, linter=None): BaseTokenChecker.__init__(self, linter) self._lines = None self._visited_lines = None + self._bracket_stack = [None] + + def _pop_token(self): + self._bracket_stack.pop() + self._current_line.pop_token() + + def _push_token(self, token, idx): + self._bracket_stack.append(token) + self._current_line.push_token(token, idx) - def new_line(self, tok_type, line, line_num, junk): + def new_line(self, tokens, line_end, line_start): """a new line has been encountered, process it if necessary""" - if not tok_type in junk: + if _last_token_on_line_is(tokens, line_end, ';'): + self.add_message('unnecessary-semicolon', line=tokens.start_line(line_end)) + + line_num = tokens.start_line(line_start) + line = tokens.line(line_start) + if tokens.type(line_start) not in _JUNK_TOKENS: self._lines[line_num] = line.split('\n')[0] self.check_lines(line, line_num) def process_module(self, module): self._keywords_with_parens = set() - for node in module.body: - if (isinstance(node, nodes.From) and node.modname == '__future__' - and any(name == 'print_function' for name, _ in node.names)): - self._keywords_with_parens.add('print') + if 'print_function' in module.future_imports: + self._keywords_with_parens.add('print') def _check_keyword_parentheses(self, tokens, start): 
"""Check that there are not unnecessary parens after a keyword. @@ -211,6 +489,8 @@ def _check_keyword_parentheses(self, tokens, start): start: int; the position of the keyword in the token list. """ # If the next token is not a paren, we're fine. + if self._inside_brackets(':') and tokens[start][1] == 'for': + self._pop_token() if tokens[start+1][1] != '(': return @@ -240,14 +520,14 @@ def _check_keyword_parentheses(self, tokens, start): return if keyword_token == 'not': if not found_and_or: - self.add_message('C0325', line=line_num, + self.add_message('superfluous-parens', line=line_num, args=keyword_token) elif keyword_token in ('return', 'yield'): - self.add_message('C0325', line=line_num, + self.add_message('superfluous-parens', line=line_num, args=keyword_token) elif keyword_token not in self._keywords_with_parens: if not (tokens[i+1][1] == 'in' and found_and_or): - self.add_message('C0325', line=line_num, + self.add_message('superfluous-parens', line=line_num, args=keyword_token) return elif depth == 1: @@ -270,7 +550,7 @@ def _check_keyword_parentheses(self, tokens, start): return def _opening_bracket(self, tokens, i): - self._bracket_stack.append(tokens[i][1]) + self._push_token(tokens[i][1], i) # Special case: ignore slices if tokens[i][1] == '[' and tokens[i+1][1] == ':': return @@ -283,7 +563,9 @@ def _opening_bracket(self, tokens, i): self._check_space(tokens, i, (_IGNORE, _MUST_NOT)) def _closing_bracket(self, tokens, i): - self._bracket_stack.pop() + if self._inside_brackets(':'): + self._pop_token() + self._pop_token() # Special case: ignore slices if tokens[i-1][1] == ':' and tokens[i][1] == ']': return @@ -302,7 +584,7 @@ def _check_equals_spacing(self, tokens, i): self._check_space(tokens, i, (_MUST, _MUST)) def _open_lambda(self, tokens, i): # pylint:disable=unused-argument - self._bracket_stack.append('lambda') + self._push_token('lambda', i) def _handle_colon(self, tokens, i): # Special case: ignore slices @@ -316,7 +598,9 @@ def 
_handle_colon(self, tokens, i): self._check_space(tokens, i, policy) if self._inside_brackets('lambda'): - self._bracket_stack.pop() + self._pop_token() + elif self._inside_brackets('{'): + self._push_token(':', i) def _handle_comma(self, tokens, i): # Only require a following whitespace if this is @@ -325,6 +609,8 @@ def _handle_comma(self, tokens, i): self._check_space(tokens, i, (_MUST_NOT, _IGNORE)) else: self._check_space(tokens, i, (_MUST_NOT, _MUST)) + if self._inside_brackets(':'): + self._pop_token() def _check_surrounded_by_space(self, tokens, i): """Check that a binary operator is surrounded by exactly one space.""" @@ -344,7 +630,7 @@ def _name_construct(token): return ':' elif tokens[i][1] in '()[]{}': return 'bracket' - elif tokens[i][1] in ('<', '>', '<=', '>=', '!='): + elif tokens[i][1] in ('<', '>', '<=', '>=', '!=', '=='): return 'comparison' else: if self._inside_brackets('('): @@ -376,13 +662,17 @@ def _name_construct(token): for policy, position in warnings: construct = _name_construct(tokens[i]) count, state = _policy_string(policy) - self.add_message('C0326', line=tokens[i][2][0], + self.add_message('bad-whitespace', line=tokens[i][2][0], args=(count, state, position, construct, _underline_token(tokens[i]))) def _inside_brackets(self, left): return self._bracket_stack[-1] == left + def _handle_old_ne_operator(self, tokens, i): + if tokens[i][1] == '<>': + self.add_message('old-ne-operator', line=tokens[i][2][0]) + def _prepare_token_dispatcher(self): raw = [ (_KEYWORD_TOKENS, @@ -401,6 +691,8 @@ def _prepare_token_dispatcher(self): ([':'], self._handle_colon), (['lambda'], self._open_lambda), + + (['<>'], self._handle_old_ne_operator), ] dispatch = {} @@ -419,76 +711,63 @@ def process_tokens(self, tokens): regular expression). 
""" self._bracket_stack = [None] - indent = tokenize.INDENT - dedent = tokenize.DEDENT - newline = tokenize.NEWLINE - junk = (tokenize.COMMENT, tokenize.NL) indents = [0] - check_equal = 0 + check_equal = False line_num = 0 - previous = None self._lines = {} self._visited_lines = {} - new_line_delay = False token_handlers = self._prepare_token_dispatcher() + + self._current_line = ContinuedLineState(tokens, self.config) for idx, (tok_type, token, start, _, line) in enumerate(tokens): - if new_line_delay: - new_line_delay = False - self.new_line(tok_type, line, line_num, junk) if start[0] != line_num: - if previous is not None and previous[0] == tokenize.OP and previous[1] == ';': - self.add_message('W0301', line=previous[2]) - previous = None line_num = start[0] # A tokenizer oddity: if an indented line contains a multi-line # docstring, the line member of the INDENT token does not contain - # the full line; therefore we delay checking the new line until - # the next token. + # the full line; therefore we check the next token on the line. if tok_type == tokenize.INDENT: - new_line_delay = True + self.new_line(TokenWrapper(tokens), idx-1, idx+1) else: - self.new_line(tok_type, line, line_num, junk) - if tok_type not in (indent, dedent, newline) + junk: - previous = tok_type, token, start[0] - - if tok_type == tokenize.OP: - if token == '<>': - self.add_message('W0331', line=line_num) - elif tok_type == tokenize.NUMBER: - if token.endswith('l'): - self.add_message('W0332', line=line_num) - - elif tok_type == newline: + self.new_line(TokenWrapper(tokens), idx-1, idx) + + if tok_type == tokenize.NEWLINE: # a program statement, or ENDMARKER, will eventually follow, # after some (possibly empty) run of tokens of the form # (NL | COMMENT)* (INDENT | DEDENT+)? # If an INDENT appears, setting check_equal is wrong, and will # be undone when we see the INDENT. 
- check_equal = 1 - - elif tok_type == indent: - check_equal = 0 + check_equal = True + self._process_retained_warnings(TokenWrapper(tokens), idx) + self._current_line.next_logical_line() + elif tok_type == tokenize.INDENT: + check_equal = False self.check_indent_level(token, indents[-1]+1, line_num) indents.append(indents[-1]+1) - - elif tok_type == dedent: + elif tok_type == tokenize.DEDENT: # there's nothing we need to check here! what's important is # that when the run of DEDENTs ends, the indentation of the # program statement (or ENDMARKER) that triggered the run is # equal to what's left at the top of the indents stack - check_equal = 1 + check_equal = True if len(indents) > 1: del indents[-1] - - elif check_equal and tok_type not in junk: - # this is the first "real token" following a NEWLINE, so it + elif tok_type == tokenize.NL: + self._check_continued_indentation(TokenWrapper(tokens), idx+1) + self._current_line.next_physical_line() + elif tok_type != tokenize.COMMENT: + self._current_line.handle_line_start(idx) + # This is the first concrete token following a NEWLINE, so it # must be the first token of the next program statement, or an # ENDMARKER; the "line" argument exposes the leading whitespace # for this statement; in the case of ENDMARKER, line is an empty # string, so will properly match the empty string with which the # "indents" stack was seeded - check_equal = 0 - self.check_indent_level(line, indents[-1], line_num) + if check_equal: + check_equal = False + self.check_indent_level(line, indents[-1], line_num) + + if tok_type == tokenize.NUMBER and token.endswith('l'): + self.add_message('lowercase-l-suffix', line=line_num) try: handler = token_handlers[token] @@ -499,9 +778,52 @@ def process_tokens(self, tokens): line_num -= 1 # to be ok with "wc -l" if line_num > self.config.max_module_lines: - self.add_message('C0302', args=line_num, line=1) + self.add_message('too-many-lines', args=line_num, line=1) + + def _process_retained_warnings(self, 
tokens, current_pos): + single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ':') + + for indent_pos, state, offsets in self._current_line.retained_warnings: + block_type = offsets[tokens.start_col(indent_pos)] + hints = dict((k, v) for k, v in offsets.iteritems() + if v != block_type) + if single_line_block_stmt and block_type == WITH_BODY: + self._add_continuation_message(state, hints, tokens, indent_pos) + elif not single_line_block_stmt and block_type == SINGLE_LINE: + self._add_continuation_message(state, hints, tokens, indent_pos) + + def _check_continued_indentation(self, tokens, next_idx): + # Do not issue any warnings if the next line is empty. + if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL: + return - @check_messages('C0321', 'C03232', 'C0323', 'C0324') + state, valid_offsets = self._current_line.get_valid_offsets(next_idx) + # Special handling for hanging comments. If the last line ended with a + # comment and the new line contains only a comment, the line may also be + # indented to the start of the previous comment. + if (tokens.type(next_idx) == tokenize.COMMENT and + tokens.type(next_idx-2) == tokenize.COMMENT): + valid_offsets[tokens.start_col(next_idx-2)] = True + + # We can only decide if the indentation of a continued line before opening + # a new block is valid once we know of the body of the block is on the + # same line as the block opener. Since the token processing is single-pass, + # emitting those warnings is delayed until the block opener is processed. 
+ if (state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK) + and tokens.start_col(next_idx) in valid_offsets): + self._current_line.add_block_warning(next_idx, state, valid_offsets) + elif tokens.start_col(next_idx) not in valid_offsets: + self._add_continuation_message(state, valid_offsets, tokens, next_idx) + + def _add_continuation_message(self, state, offsets, tokens, position): + readable_type, readable_position = _CONTINUATION_MSG_PARTS[state.context_type] + hint_line = _get_indent_hint_line(offsets, tokens.start_col(position)) + self.add_message( + 'bad-continuation', + line=tokens.start_line(position), + args=(readable_type, readable_position, tokens.line(position), hint_line)) + + @check_messages('multiple-statements') def visit_default(self, node): """check the node line number and check it if not yet done""" if not node.is_statement: @@ -555,12 +877,12 @@ def _check_multi_statement_line(self, node, line): if (isinstance(node.parent, nodes.If) and not node.parent.orelse and self.config.single_line_if_stmt): return - self.add_message('C0321', node=node) + self.add_message('multiple-statements', node=node) self._visited_lines[line] = 2 - @check_messages('W0333') + @check_messages('backtick') def visit_backquote(self, node): - self.add_message('W0333', node=node) + self.add_message('backtick', node=node) def check_lines(self, lines, i): """check lines have less than a maximum number of characters @@ -570,11 +892,11 @@ def check_lines(self, lines, i): for line in lines.splitlines(True): if not line.endswith('\n'): - self.add_message('C0304', line=i) + self.add_message('missing-final-newline', line=i) else: stripped_line = line.rstrip() if line[len(stripped_line):] not in ('\n', '\r\n'): - self.add_message('C0303', line=i) + self.add_message('trailing-whitespace', line=i) # Don't count excess whitespace in the line length. 
line = stripped_line mobj = OPTION_RGX.search(line) @@ -582,7 +904,7 @@ def check_lines(self, lines, i): line = line.split('#')[0].rstrip() if len(line) > max_chars and not ignore_long_line.search(line): - self.add_message('C0301', line=i, args=(len(line), max_chars)) + self.add_message('line-too-long', line=i, args=(len(line), max_chars)) i += 1 def check_indent_level(self, string, expected, line_num): @@ -603,7 +925,7 @@ def check_indent_level(self, string, expected, line_num): args = ('tab', 'space') else: args = ('space', 'tab') - self.add_message('W0312', args=args, line=line_num) + self.add_message('mixed-indentation', args=args, line=line_num) return level suppl += string[0] string = string[1:] @@ -611,7 +933,7 @@ def check_indent_level(self, string, expected, line_num): i_type = 'spaces' if indent[0] == '\t': i_type = 'tabs' - self.add_message('W0311', line=line_num, + self.add_message('bad-indentation', line=line_num, args=(level * unit_size + len(suppl), i_type, expected * unit_size)) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py index df4304a4..8b73c6f6 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py @@ -15,8 +15,10 @@ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
"""imports checkers for Python code""" +import sys + from logilab.common.graph import get_cycles, DotBackend -from logilab.common.modutils import is_standard_module +from logilab.common.modutils import get_module_part, is_standard_module from logilab.common.ureports import VerbatimText, Paragraph import astroid @@ -148,7 +150,8 @@ def make_graph(filename, dep_info, sect, gtype): 'W0410': ('__future__ import is not the first non docstring statement', 'misplaced-future', 'Python 2.5 and greater require __future__ import to be the \ - first non docstring statement in the module.'), + first non docstring statement in the module.', + {'maxversion': (3, 0)}), } class ImportsChecker(BaseChecker): @@ -165,8 +168,12 @@ class ImportsChecker(BaseChecker): msgs = MSGS priority = -2 + if sys.version_info < (3,): + deprecated_modules = ('regsub', 'TERMIOS', 'Bastion', 'rexec') + else: + deprecated_modules = ('stringprep', 'optparse') options = (('deprecated-modules', - {'default' : ('regsub', 'TERMIOS', 'Bastion', 'rexec'), + {'default' : deprecated_modules, 'type' : 'csv', 'metavar' : '', 'help' : 'Deprecated modules which should not be used, \ @@ -217,9 +224,9 @@ def open(self): def close(self): """called before visiting project (i.e set of modules)""" # don't try to compute cycles if the associated message is disabled - if self.linter.is_message_enabled('R0401'): + if self.linter.is_message_enabled('cyclic-import'): for cycle in get_cycles(self.import_graph): - self.add_message('R0401', args=' -> '.join(cycle)) + self.add_message('cyclic-import', args=' -> '.join(cycle)) def visit_import(self, node): """triggered when an import statement is seen""" @@ -246,8 +253,11 @@ def visit_from(self, node): # consecutive future statements are possible if not (isinstance(prev, astroid.From) and prev.modname == '__future__'): - self.add_message('W0410', node=node) + self.add_message('misplaced-future', node=node) return + for name, _ in node.names: + if name == '*': + 
self.add_message('wildcard-import', args=basename, node=node) modnode = node.root() importedmodnode = self.get_imported_module(modnode, node, basename) if importedmodnode is None: @@ -255,11 +265,9 @@ def visit_from(self, node): self._check_relative_import(modnode, node, importedmodnode, basename) self._check_deprecated_module(node, basename) for name, _ in node.names: - if name == '*': - self.add_message('W0401', args=basename, node=node) - continue - self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name)) - self._check_reimport(node, name, basename, node.level) + if name != '*': + self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name)) + self._check_reimport(node, name, basename, node.level) def get_imported_module(self, modnode, importnode, modname): try: @@ -269,14 +277,14 @@ def get_imported_module(self, modnode, importnode, modname): args = '%r (%s)' % (modname, ex) else: args = repr(modname) - self.add_message("F0401", args=args, node=importnode) + self.add_message("import-error", args=args, node=importnode) def _check_relative_import(self, modnode, importnode, importedmodnode, importedasname): """check relative import. node is either an Import or From node, modname the imported module name. """ - if 'W0403' not in self.active_msgs: + if not self.linter.is_message_enabled('relative-import'): return if importedmodnode.file is None: return False # built-in module @@ -286,15 +294,16 @@ def _check_relative_import(self, modnode, importnode, importedmodnode, return False if importedmodnode.name != importedasname: # this must be a relative import... 
- self.add_message('W0403', args=(importedasname, importedmodnode.name), + self.add_message('relative-import', args=(importedasname, importedmodnode.name), node=importnode) def _add_imported_module(self, node, importedmodname): """notify an imported module, used to analyze dependencies""" + importedmodname = get_module_part(importedmodname) context_name = node.root().name if context_name == importedmodname: # module importing itself ! - self.add_message('W0406', node=node) + self.add_message('import-self', node=node) elif not is_standard_module(importedmodname): # handle dependencies importedmodnames = self.stats['dependencies'].setdefault( @@ -310,11 +319,11 @@ def _check_deprecated_module(self, node, mod_path): """check if the module is deprecated""" for mod_name in self.config.deprecated_modules: if mod_path == mod_name or mod_path.startswith(mod_name + '.'): - self.add_message('W0402', node=node, args=mod_path) + self.add_message('deprecated-module', node=node, args=mod_path) def _check_reimport(self, node, name, basename=None, level=None): """check if the import is necessary (i.e. 
not already done)""" - if 'W0404' not in self.active_msgs: + if not self.linter.is_message_enabled('reimported'): return frame = node.frame() root = node.root() @@ -324,7 +333,7 @@ def _check_reimport(self, node, name, basename=None, level=None): for context, level in contexts: first = get_first_import(node, context, name, basename, level) if first is not None: - self.add_message('W0404', node=node, + self.add_message('reimported', node=node, args=(name, first.fromlineno)) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py index a6b0145d..cbdf0f2a 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py @@ -61,66 +61,97 @@ class LoggingChecker(checkers.BaseChecker): name = 'logging' msgs = MSGS + options = (('logging-modules', + {'default' : ('logging',), + 'type' : 'csv', + 'metavar' : '', + 'help' : ('Logging modules to check that the string format ' + 'arguments are in logging function parameter format')} + ), + ) + def visit_module(self, unused_node): """Clears any state left in this checker from last module checked.""" # The code being checked can just as easily "import logging as foo", # so it is necessary to process the imports and store in this field # what name the logging module is actually given. 
- self._logging_name = None + self._logging_names = set() + logging_mods = self.config.logging_modules + + self._logging_modules = set(logging_mods) + self._from_imports = {} + for logging_mod in logging_mods: + parts = logging_mod.rsplit('.', 1) + if len(parts) > 1: + self._from_imports[parts[0]] = parts[1] + + def visit_from(self, node): + """Checks to see if a module uses a non-Python logging module.""" + try: + logging_name = self._from_imports[node.modname] + for module, as_name in node.names: + if module == logging_name: + self._logging_names.add(as_name or module) + except KeyError: + pass def visit_import(self, node): """Checks to see if this module uses Python's built-in logging.""" for module, as_name in node.names: - if module == 'logging': - if as_name: - self._logging_name = as_name - else: - self._logging_name = 'logging' + if module in self._logging_modules: + self._logging_names.add(as_name or module) @check_messages(*(MSGS.keys())) def visit_callfunc(self, node): - """Checks calls to (simple forms of) logging methods.""" - if (not isinstance(node.func, astroid.Getattr) - or not isinstance(node.func.expr, astroid.Name)): - return - try: - logger_class = [inferred for inferred in node.func.expr.infer() if ( - isinstance(inferred, astroid.Instance) - and any(ancestor for ancestor in inferred._proxied.ancestors() if ( - ancestor.name == 'Logger' - and ancestor.parent.name == 'logging')))] - except astroid.exceptions.InferenceError: - return - if node.func.expr.name != self._logging_name and not logger_class: - return - self._check_convenience_methods(node) - self._check_log_methods(node) + """Checks calls to logging methods.""" + def is_logging_name(): + return (isinstance(node.func, astroid.Getattr) and + isinstance(node.func.expr, astroid.Name) and + node.func.expr.name in self._logging_names) - def _check_convenience_methods(self, node): - """Checks calls to logging convenience methods (like logging.warn).""" - if node.func.attrname not in 
CHECKED_CONVENIENCE_FUNCTIONS: - return - if node.starargs or node.kwargs or not node.args: - # Either no args, star args, or double-star args. Beyond the - # scope of this checker. - return - if isinstance(node.args[0], astroid.BinOp) and node.args[0].op == '%': - self.add_message('W1201', node=node) - elif isinstance(node.args[0], astroid.Const): - self._check_format_string(node, 0) + def is_logger_class(): + try: + for inferred in node.func.infer(): + if isinstance(inferred, astroid.BoundMethod): + parent = inferred._proxied.parent + if (isinstance(parent, astroid.Class) and + (parent.qname() == 'logging.Logger' or + any(ancestor.qname() == 'logging.Logger' + for ancestor in parent.ancestors()))): + return True, inferred._proxied.name + except astroid.exceptions.InferenceError: + pass + return False, None + + if is_logging_name(): + name = node.func.attrname + else: + result, name = is_logger_class() + if not result: + return + self._check_log_method(node, name) - def _check_log_methods(self, node): + def _check_log_method(self, node, name): """Checks calls to logging.log(level, format, *format_args).""" - if node.func.attrname != 'log': - return - if node.starargs or node.kwargs or len(node.args) < 2: - # Either a malformed call, star args, or double-star args. Beyond - # the scope of this checker. + if name == 'log': + if node.starargs or node.kwargs or len(node.args) < 2: + # Either a malformed call, star args, or double-star args. Beyond + # the scope of this checker. + return + format_pos = 1 + elif name in CHECKED_CONVENIENCE_FUNCTIONS: + if node.starargs or node.kwargs or not node.args: + # Either no args, star args, or double-star args. Beyond the + # scope of this checker. 
+ return + format_pos = 0 + else: return - if isinstance(node.args[1], astroid.BinOp) and node.args[1].op == '%': - self.add_message('W1201', node=node) - elif isinstance(node.args[1], astroid.Const): - self._check_format_string(node, 1) + + if isinstance(node.args[format_pos], astroid.BinOp) and node.args[format_pos].op == '%': + self.add_message('logging-not-lazy', node=node) + elif isinstance(node.args[format_pos], astroid.Const): + self._check_format_string(node, format_pos) def _check_format_string(self, node, format_arg): """Checks that format string tokens match the supplied arguments. @@ -149,16 +180,16 @@ def _check_format_string(self, node, format_arg): return except utils.UnsupportedFormatCharacter, ex: char = format_string[ex.index] - self.add_message('E1200', node=node, + self.add_message('logging-unsupported-format', node=node, args=(char, ord(char), ex.index)) return except utils.IncompleteFormatString: - self.add_message('E1201', node=node) + self.add_message('logging-format-truncated', node=node) return if num_args > required_num_args: - self.add_message('E1205', node=node) + self.add_message('logging-too-many-args', node=node) elif num_args < required_num_args: - self.add_message('E1206', node=node) + self.add_message('logging-too-few-args', node=node) def _count_supplied_tokens(args): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py index 9c49825e..d1b7c216 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py @@ -57,13 +57,13 @@ class EncodingChecker(BaseChecker): def _check_note(self, notes, lineno, line): match = notes.search(line) if match: - self.add_message('W0511', args=line[match.start():-1], line=lineno) + self.add_message('fixme', args=line[match.start():-1], line=lineno) def _check_encoding(self, lineno, line, file_encoding): try: return unicode(line, 
file_encoding) except UnicodeDecodeError, ex: - self.add_message('W0512', line=lineno, + self.add_message('invalid-encoded-data', line=lineno, args=(file_encoding, ex.args[2])) def process_module(self, module): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py index 027d512f..f801c443 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py @@ -1,4 +1,4 @@ -# Copyright (c) 2005-2013 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2005-2014 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -26,10 +26,12 @@ MSGS = { 'E1001': ('Use of __slots__ on an old style class', 'slots-on-old-class', - 'Used when an old style class uses the __slots__ attribute.'), + 'Used when an old style class uses the __slots__ attribute.', + {'maxversion': (3, 0)}), 'E1002': ('Use of super on an old style class', 'super-on-old-class', - 'Used when an old style class uses the super builtin.'), + 'Used when an old style class uses the super builtin.', + {'maxversion': (3, 0)}), 'E1003': ('Bad first argument %r given to super()', 'bad-super-call', 'Used when another argument than the current class is given as \ @@ -37,17 +39,19 @@ 'E1004': ('Missing argument to super()', 'missing-super-argument', 'Used when the super builtin didn\'t receive an \ - argument on Python 2', + argument.', {'maxversion': (3, 0)}), 'W1001': ('Use of "property" on an old style class', 'property-on-old-class', 'Used when PyLint detect the use of the builtin "property" \ on an old style class while this is relying on new style \ - classes features'), + classes features.', + {'maxversion': (3, 0)}), 'C1001': ('Old-style class defined.', 'old-style-class', 'Used when a class is defined that does not inherit from 
another' - 'class and does not inherit explicitly from "object".') + 'class and does not inherit explicitly from "object".', + {'maxversion': (3, 0)}) } @@ -68,19 +72,19 @@ class NewStyleConflictChecker(BaseChecker): # configuration options options = () - @check_messages('E1001', 'C1001') + @check_messages('slots-on-old-class', 'old-style-class') def visit_class(self, node): """check __slots__ usage """ if '__slots__' in node and not node.newstyle: - self.add_message('E1001', node=node) + self.add_message('slots-on-old-class', node=node) # The node type could be class, exception, metaclass, or # interface. Presumably, the non-class-type nodes would always # have an explicit base class anyway. if not node.bases and node.type == 'class': - self.add_message('C1001', node=node) + self.add_message('old-style-class', node=node) - @check_messages('W1001') + @check_messages('property-on-old-class') def visit_callfunc(self, node): """check property usage""" parent = node.parent.frame() @@ -89,9 +93,9 @@ def visit_callfunc(self, node): isinstance(node.func, astroid.Name)): name = node.func.name if name == 'property': - self.add_message('W1001', node=node) + self.add_message('property-on-old-class', node=node) - @check_messages('E1002', 'E1003', 'E1004') + @check_messages('super-on-old-class', 'bad-super-call', 'missing-super-argument') def visit_function(self, node): """check use of super""" # ignore actual functions or method within a new style class @@ -109,7 +113,7 @@ def visit_function(self, node): call.func.name == 'super': if not klass.newstyle: # super should not be used on an old style class - self.add_message('E1002', node=node) + self.add_message('super-on-old-class', node=node) else: # super first arg should be the class if not call.args and sys.version_info[0] == 3: @@ -122,13 +126,24 @@ def visit_function(self, node): except astroid.InferenceError: continue - if supcls is None and sys.version_info[0] == 2: + if supcls is None: 
self.add_message('missing-super-argument', node=call) continue if klass is not supcls: - supcls = getattr(supcls, 'name', supcls) - self.add_message('E1003', node=call, args=(supcls, )) + name = None + # if supcls is not YES, then supcls was infered + # and use its name. Otherwise, try to look + # for call.args[0].name + if supcls is not astroid.YES: + name = supcls.name + else: + if hasattr(call.args[0], 'name'): + name = call.args[0].name + if name is not None: + self.add_message('bad-super-call', + node=call, + args=(name, )) def register(linter): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py index 8cb78f4d..a1c31337 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py @@ -42,7 +42,7 @@ class OpenModeChecker(BaseChecker): 'See http://docs.python.org/2/library/functions.html#open'), } - @utils.check_messages('W1501') + @utils.check_messages('bad-open-mode') def visit_callfunc(self, node): """Visit a CallFunc node.""" if hasattr(node, 'func'): @@ -59,7 +59,7 @@ def _check_open_mode(self, node): mode_arg = utils.safe_infer(mode_arg) if (isinstance(mode_arg, astroid.Const) and not re.match(_VALID_OPEN_MODE_REGEX, mode_arg.value)): - self.add_message('W1501', node=node, args=(mode_arg.value)) + self.add_message('bad-open-mode', node=node, args=(mode_arg.value)) except (utils.NoSuchArgumentError, TypeError): pass diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py index b905d280..04cf1bc7 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py @@ -23,7 +23,7 @@ import astroid -from pylint.interfaces import ITokenChecker, IAstroidChecker +from pylint.interfaces import ITokenChecker, IAstroidChecker, 
IRawChecker from pylint.checkers import BaseChecker, BaseTokenChecker from pylint.checkers import utils from pylint.checkers.utils import check_messages @@ -102,15 +102,15 @@ def visit_binop(self, node): utils.parse_format_string(format_string) except utils.UnsupportedFormatCharacter, e: c = format_string[e.index] - self.add_message('E1300', node=node, args=(c, ord(c), e.index)) + self.add_message('bad-format-character', node=node, args=(c, ord(c), e.index)) return except utils.IncompleteFormatString: - self.add_message('E1301', node=node) + self.add_message('truncated-format-string', node=node) return if required_keys and required_num_args: # The format string uses both named and unnamed format # specifiers. - self.add_message('E1302', node=node) + self.add_message('mixed-format-string', node=node) elif required_keys: # The format string uses only named format specifiers. # Check that the RHS of the % operator is a mapping object @@ -125,7 +125,7 @@ def visit_binop(self, node): if isinstance(key, basestring): keys.add(key) else: - self.add_message('W1300', node=node, args=key) + self.add_message('bad-format-string-key', node=node, args=key) else: # One of the keys was something other than a # constant. Since we can't tell what it is, @@ -135,13 +135,13 @@ def visit_binop(self, node): if not unknown_keys: for key in required_keys: if key not in keys: - self.add_message('E1304', node=node, args=key) + self.add_message('missing-format-string-key', node=node, args=key) for key in keys: if key not in required_keys: - self.add_message('W1301', node=node, args=key) + self.add_message('unused-format-string-key', node=node, args=key) elif isinstance(args, OTHER_NODES + (astroid.Tuple,)): type_name = type(args).__name__ - self.add_message('E1303', node=node, args=type_name) + self.add_message('format-needs-mapping', node=node, args=type_name) # else: # The RHS of the format specifier is a name or # expression. 
It may be a mapping object, so @@ -162,9 +162,9 @@ def visit_binop(self, node): num_args = None if num_args is not None: if num_args > required_num_args: - self.add_message('E1305', node=node) + self.add_message('too-many-format-args', node=node) elif num_args < required_num_args: - self.add_message('E1306', node=node) + self.add_message('too-few-format-args', node=node) class StringMethodsChecker(BaseChecker): @@ -189,13 +189,13 @@ def visit_callfunc(self, node): if not isinstance(arg, astroid.Const): return if len(arg.value) != len(set(arg.value)): - self.add_message('E1310', node=node, + self.add_message('bad-str-strip-call', node=node, args=(func.bound.name, func.name)) class StringConstantChecker(BaseTokenChecker): """Check string literals""" - __implements__ = (ITokenChecker,) + __implements__ = (ITokenChecker, IRawChecker) name = 'string_constant' msgs = { 'W1401': ('Anomalous backslash in string: \'%s\'. ' @@ -221,6 +221,9 @@ class StringConstantChecker(BaseTokenChecker): # Unicode strings. 
UNICODE_ESCAPE_CHARACTERS = 'uUN' + def process_module(self, module): + self._unicode_literals = 'unicode_literals' in module.future_imports + def process_tokens(self, tokens): for (tok_type, token, (start_row, start_col), _, _) in tokens: if tok_type == tokenize.STRING: @@ -279,12 +282,14 @@ def process_non_raw_string_token(self, prefix, string_body, start_row, if next_char in self.UNICODE_ESCAPE_CHARACTERS: if 'u' in prefix: pass - elif _PY3K and 'b' not in prefix: + elif (_PY3K or self._unicode_literals) and 'b' not in prefix: pass # unicode by default else: - self.add_message('W1402', line=start_row, args=(match, )) + self.add_message('anomalous-unicode-escape-in-string', + line=start_row, args=(match, )) elif next_char not in self.ESCAPE_CHARACTERS: - self.add_message('W1401', line=start_row, args=(match, )) + self.add_message('anomalous-backslash-in-string', + line=start_row, args=(match, )) # Whether it was a valid escape or not, backslash followed by # another character can always be consumed whole: the second # character can never be the start of a new backslash escape. 
diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py index 5e9ae1b0..79774def 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py @@ -48,34 +48,73 @@ 'Used when an assignment is done on a function call but the \ inferred function returns nothing but None.'), - 'E1120': ('No value passed for parameter %s in function call', + 'E1120': ('No value for argument %s in %s call', 'no-value-for-parameter', 'Used when a function call passes too few arguments.'), - 'E1121': ('Too many positional arguments for function call', + 'E1121': ('Too many positional arguments for %s call', 'too-many-function-args', 'Used when a function call passes too many positional \ arguments.'), - 'E1122': ('Duplicate keyword argument %r in function call', + 'E1122': ('Duplicate keyword argument %r in %s call', 'duplicate-keyword-arg', 'Used when a function call passes the same keyword argument \ multiple times.', {'maxversion': (2, 6)}), - 'E1123': ('Passing unexpected keyword argument %r in function call', + 'E1123': ('Unexpected keyword argument %r in %s call', 'unexpected-keyword-arg', 'Used when a function call passes a keyword argument that \ doesn\'t correspond to one of the function\'s parameter names.'), - 'E1124': ('Parameter %r passed as both positional and keyword argument', + 'E1124': ('Argument %r passed by position and keyword in %s call', 'redundant-keyword-arg', 'Used when a function call would result in assigning multiple \ values to a function parameter, one value from a positional \ argument and one from a keyword argument.'), - 'E1125': ('Missing mandatory keyword argument %r', + 'E1125': ('Missing mandatory keyword argument %r in %s call', 'missing-kwoa', - 'Used when a function call doesn\'t pass a mandatory \ - keyword-only argument.', + ('Used when a function call does not pass 
a mandatory' + ' keyword-only argument.'), {'minversion': (3, 0)}), } +def _determine_callable(callable_obj): + # Ordering is important, since BoundMethod is a subclass of UnboundMethod, + # and Function inherits Lambda. + if isinstance(callable_obj, astroid.BoundMethod): + # Bound methods have an extra implicit 'self' argument. + return callable_obj, 1, callable_obj.type + elif isinstance(callable_obj, astroid.UnboundMethod): + return callable_obj, 0, 'unbound method' + elif isinstance(callable_obj, astroid.Function): + return callable_obj, 0, callable_obj.type + elif isinstance(callable_obj, astroid.Lambda): + return callable_obj, 0, 'lambda' + elif isinstance(callable_obj, astroid.Class): + # Class instantiation, lookup __new__ instead. + # If we only find object.__new__, we can safely check __init__ + # instead. + try: + # Use the last definition of __new__. + new = callable_obj.local_attr('__new__')[-1] + except astroid.NotFoundError: + new = None + + if not new or new.parent.scope().name == 'object': + try: + # Use the last definition of __init__. + callable_obj = callable_obj.local_attr('__init__')[-1] + except astroid.NotFoundError: + # do nothing, covered by no-init. + raise ValueError + else: + callable_obj = new + + if not isinstance(callable_obj, astroid.Function): + raise ValueError + # both have an extra implicit 'cls'/'self' argument. + return callable_obj, 1, 'constructor' + else: + raise ValueError + class TypeChecker(BaseChecker): """try to find bugs in the code using type inference """ @@ -94,7 +133,15 @@ class TypeChecker(BaseChecker): class should be ignored. 
A mixin class is detected if its name ends with \ "mixin" (case insensitive).'} ), - + ('ignored-modules', + {'default': (), + 'type': 'csv', + 'metavar': '', + 'help': 'List of module names for which member attributes \ +should not be checked (useful for modules/projects where namespaces are \ +manipulated during runtime and thus extisting member attributes cannot be \ +deduced by static analysis'}, + ), ('ignored-classes', {'default' : ('SQLObject',), 'type' : 'csv', @@ -132,7 +179,7 @@ def visit_assattr(self, node): def visit_delattr(self, node): self.visit_getattr(node) - @check_messages('E1101', 'E1103') + @check_messages('no-member', 'maybe-no-member') def visit_getattr(self, node): """check that the accessed attribute exists @@ -191,8 +238,8 @@ def visit_getattr(self, node): continue if isinstance(owner, Instance) and owner.has_dynamic_getattr(): continue - # explicit skipping of optparse'Values class - if owner.name == 'Values' and owner.root().name == 'optparse': + # explicit skipping of module member access + if owner.root().name in self.config.ignored_modules: continue missingattr.add((owner, name)) continue @@ -211,14 +258,14 @@ def visit_getattr(self, node): continue done.add(actual) if inference_failure: - msgid = 'E1103' + msgid = 'maybe-no-member' else: - msgid = 'E1101' + msgid = 'no-member' self.add_message(msgid, node=node, args=(owner.display_type(), name, node.attrname)) - @check_messages('E1111', 'W1111') + @check_messages('assignment-from-no-return', 'assignment-from-none') def visit_assign(self, node): """check that if assigning to a function call, the function is possibly returning something valuable @@ -236,14 +283,15 @@ def visit_assign(self, node): returns = list(function_node.nodes_of_class(astroid.Return, skip_klass=astroid.Function)) if len(returns) == 0: - self.add_message('E1111', node=node) + self.add_message('assignment-from-no-return', node=node) else: for rnode in returns: if not (isinstance(rnode.value, astroid.Const) - and 
rnode.value.value is None): + and rnode.value.value is None + or rnode.value is None): break else: - self.add_message('W1111', node=node) + self.add_message('assignment-from-none', node=node) @check_messages(*(MSGS.keys())) def visit_callfunc(self, node): @@ -251,7 +299,6 @@ def visit_callfunc(self, node): and that the arguments passed to the function match the parameters in the inferred function's definition """ - # Build the set of keyword arguments, checking for duplicate keywords, # and count the positional arguments. keyword_args = set() @@ -260,7 +307,7 @@ def visit_callfunc(self, node): if isinstance(arg, astroid.Keyword): keyword = arg.arg if keyword in keyword_args: - self.add_message('E1122', node=node, args=keyword) + self.add_message('duplicate-keyword-arg', node=node, args=keyword) keyword_args.add(keyword) else: num_positional_args += 1 @@ -268,26 +315,15 @@ def visit_callfunc(self, node): called = safe_infer(node.func) # only function, generator and object defining __call__ are allowed if called is not None and not called.callable(): - self.add_message('E1102', node=node, args=node.func.as_string()) - - # Note that BoundMethod is a subclass of UnboundMethod (huh?), so must - # come first in this 'if..else'. - if isinstance(called, astroid.BoundMethod): - # Bound methods have an extra implicit 'self' argument. - num_positional_args += 1 - elif isinstance(called, astroid.UnboundMethod): - if called.decorators is not None: - for d in called.decorators.nodes: - if isinstance(d, astroid.Name) and (d.name == 'classmethod'): - # Class methods have an extra implicit 'cls' argument. 
- num_positional_args += 1 - break - elif (isinstance(called, astroid.Function) or - isinstance(called, astroid.Lambda)): - pass - else: - return + self.add_message('not-callable', node=node, args=node.func.as_string()) + try: + called, implicit_args, callable_name = _determine_callable(called) + except ValueError: + # Any error occurred during determining the function type, most of + # those errors are handled by different warnings. + return + num_positional_args += implicit_args if called.args.args is None: # Built-in functions have no argument information. return @@ -342,7 +378,7 @@ def visit_callfunc(self, node): break else: # Too many positional arguments. - self.add_message('E1121', node=node) + self.add_message('too-many-function-args', node=node, args=(callable_name,)) break # 2. Match the keyword arguments. @@ -351,13 +387,13 @@ def visit_callfunc(self, node): i = parameter_name_to_index[keyword] if parameters[i][1]: # Duplicate definition of function parameter. - self.add_message('E1124', node=node, args=keyword) + self.add_message('redundant-keyword-arg', node=node, args=(keyword, callable_name)) else: parameters[i][1] = True elif keyword in kwparams: if kwparams[keyword][1]: # XXX is that even possible? # Duplicate definition of function parameter. - self.add_message('E1124', node=node, args=keyword) + self.add_message('redundant-keyword-arg', node=node, args=(keyword, callable_name)) else: kwparams[keyword][1] = True elif called.args.kwarg is not None: @@ -365,7 +401,7 @@ def visit_callfunc(self, node): pass else: # Unexpected keyword argument. - self.add_message('E1123', node=node, args=keyword) + self.add_message('unexpected-keyword-arg', node=node, args=(keyword, callable_name)) # 3. Match the *args, if any. 
Note that Python actually processes # *args _before_ any keyword arguments, but we wait until after @@ -402,12 +438,12 @@ def visit_callfunc(self, node): display_name = '' else: display_name = repr(name) - self.add_message('E1120', node=node, args=display_name) + self.add_message('no-value-for-parameter', node=node, args=(display_name, callable_name)) for name in kwparams: defval, assigned = kwparams[name] if defval is None and not assigned: - self.add_message('E1125', node=node, args=name) + self.add_message('missing-kwoa', node=node, args=(name, callable_name)) def register(linter): diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py index 7c489e8b..dc8d1115 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py @@ -1,4 +1,4 @@ -# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE). +# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under @@ -146,6 +146,12 @@ def _get_unpacking_extra_info(node, infered): 'Used when something which is not ' 'a sequence is used in an unpack assignment'), + 'W0640': ('Cell variable %s defined in loop', + 'cell-var-from-loop', + 'A variable used in a closure is defined in a loop. ' + 'This will result in all closures using the same value for ' + 'the closed-over variable.'), + } class VariablesChecker(BaseChecker): @@ -170,8 +176,8 @@ class VariablesChecker(BaseChecker): ("dummy-variables-rgx", {'default': ('_$|dummy'), 'type' :'regexp', 'metavar' : '', - 'help' : 'A regular expression matching the beginning of \ - the name of dummy variables (i.e. not used).'}), + 'help' : 'A regular expression matching the name of dummy \ +variables (i.e. 
expectedly not used).'}), ("additional-builtins", {'default': (), 'type' : 'csv', 'metavar' : '', @@ -192,9 +198,9 @@ def visit_module(self, node): for name, stmts in node.locals.iteritems(): if is_builtin(name) and not is_inside_except(stmts[0]): # do not print Redefining builtin for additional builtins - self.add_message('W0622', args=name, node=stmts[0]) + self.add_message('redefined-builtin', args=name, node=stmts[0]) - @check_messages('W0611', 'W0614', 'W0622', 'E0603', 'E0604') + @check_messages('unused-import', 'unused-wildcard-import', 'redefined-builtin', 'undefined-all-variable', 'invalid-all-object') def leave_module(self, node): """leave module: check globals """ @@ -210,10 +216,11 @@ def leave_module(self, node): except astroid.InferenceError: continue - if not isinstance(elt_name, astroid.Const) or not isinstance(elt_name.value, basestring): - self.add_message('E0604', args=elt.as_string(), node=elt) + if not isinstance(elt_name, astroid.Const) \ + or not isinstance(elt_name.value, basestring): + self.add_message('invalid-all-object', args=elt.as_string(), node=elt) continue - elt_name = elt.value + elt_name = elt_name.value # If elt is in not_consumed, remove it from not_consumed if elt_name in not_consumed: del not_consumed[elt_name] @@ -230,8 +237,8 @@ def leave_module(self, node): try: file_from_modpath(name.split(".")) except ImportError: - self.add_message('undefined-all-variable', - args=elt_name, + self.add_message('undefined-all-variable', + args=elt_name, node=elt) except SyntaxError, exc: # don't yield an syntax-error warning, @@ -242,14 +249,18 @@ def leave_module(self, node): if not self.config.init_import and node.package: return for name, stmts in not_consumed.iteritems(): + if any(isinstance(stmt, astroid.AssName) + and isinstance(stmt.ass_type(), astroid.AugAssign) + for stmt in stmts): + continue stmt = stmts[0] if isinstance(stmt, astroid.Import): - self.add_message('W0611', args=name, node=stmt) + self.add_message('unused-import', 
args=name, node=stmt) elif isinstance(stmt, astroid.From) and stmt.modname != '__future__': if stmt.names[0][0] == '*': - self.add_message('W0614', args=name, node=stmt) + self.add_message('unused-wildcard-import', args=name, node=stmt) else: - self.add_message('W0611', args=name, node=stmt) + self.add_message('unused-import', args=name, node=stmt) del self._to_consume def visit_class(self, node): @@ -311,7 +322,8 @@ def visit_function(self, node): """visit function: update consumption analysis variable and check locals """ self._to_consume.append((copy(node.locals), {}, 'function')) - if not set(('W0621', 'W0622')) & self.active_msgs: + if not (self.linter.is_message_enabled('redefined-outer-name') or + self.linter.is_message_enabled('redefined-builtin')): return globs = node.root().globals for name, stmt in node.items(): @@ -321,15 +333,16 @@ def visit_function(self, node): line = globs[name][0].fromlineno dummy_rgx = self.config.dummy_variables_rgx if not dummy_rgx.match(name): - self.add_message('W0621', args=(name, line), node=stmt) + self.add_message('redefined-outer-name', args=(name, line), node=stmt) elif is_builtin(name): # do not print Redefining builtin for additional builtins - self.add_message('W0622', args=name, node=stmt) + self.add_message('redefined-builtin', args=name, node=stmt) def leave_function(self, node): """leave function: check function's locals are consumed""" not_consumed = self._to_consume.pop()[0] - if not set(('W0612', 'W0613')) & self.active_msgs: + if not (self.linter.is_message_enabled('unused-variable') or + self.linter.is_message_enabled('unused-argument')): return # don't check arguments of function which are only raising an exception if is_error(node): @@ -368,16 +381,17 @@ def leave_function(self, node): # don't check callback arguments XXX should be configurable if node.name.startswith('cb_') or node.name.endswith('_cb'): continue - self.add_message('W0613', args=name, node=stmt) + self.add_message('unused-argument', 
args=name, node=stmt) else: - self.add_message('W0612', args=name, node=stmt) + self.add_message('unused-variable', args=name, node=stmt) - @check_messages('W0601', 'W0602', 'W0603', 'W0604', 'W0622') + @check_messages('global-variable-undefined', 'global-variable-not-assigned', 'global-statement', + 'global-at-module-level', 'redefined-builtin') def visit_global(self, node): """check names imported exists in the global scope""" frame = node.frame() if isinstance(frame, astroid.Module): - self.add_message('W0604', node=node) + self.add_message('global-at-module-level', node=node) return module = frame.root() default_message = True @@ -397,24 +411,43 @@ def visit_global(self, node): break else: # global but no assignment - self.add_message('W0602', args=name, node=node) + self.add_message('global-variable-not-assigned', args=name, node=node) default_message = False if not assign_nodes: continue for anode in assign_nodes: if anode.parent is None: - self.add_message('W0622', args=name, node=node) + self.add_message('redefined-builtin', args=name, node=node) break if anode.frame() is module: # module level assignment break else: # global undefined at the module scope - self.add_message('W0601', args=name, node=node) + self.add_message('global-variable-undefined', args=name, node=node) default_message = False if default_message: - self.add_message('W0603', node=node) + self.add_message('global-statement', node=node) + + def _check_late_binding_closure(self, node, assignment_node, scope_type): + node_scope = node.scope() + if not isinstance(node_scope, (astroid.Lambda, astroid.Function)): + return + if isinstance(assignment_node, astroid.Comprehension): + if assignment_node.parent.parent_of(node.scope()): + self.add_message('cell-var-from-loop', node=node, args=node.name) + else: + assign_scope = assignment_node.scope() + maybe_for = assignment_node + while not isinstance(maybe_for, astroid.For): + if maybe_for is assign_scope: + break + maybe_for = maybe_for.parent + 
else: + if maybe_for.parent_of(node_scope) and not isinstance(node_scope.statement(), astroid.Return): + self.add_message('cell-var-from-loop', node=node, args=node.name) + def _loopvar_name(self, node, name): # filter variables according to node's scope # XXX used to filter parents but don't remember why, and removing this @@ -423,7 +456,7 @@ def _loopvar_name(self, node, name): #astmts = [stmt for stmt in node.lookup(name)[1] # if hasattr(stmt, 'ass_type')] and # not stmt.statement().parent_of(node)] - if 'W0631' not in self.active_msgs: + if not self.linter.is_message_enabled('undefined-loop-variable'): return astmts = [stmt for stmt in node.lookup(name)[1] if hasattr(stmt, 'ass_type')] @@ -449,14 +482,14 @@ def _loopvar_name(self, node, name): ass = astmts[0].ass_type() if isinstance(ass, (astroid.For, astroid.Comprehension, astroid.GenExpr)) \ and not ass.statement() is node.statement(): - self.add_message('W0631', args=name, node=node) + self.add_message('undefined-loop-variable', args=name, node=node) - @check_messages('W0623') + @check_messages('redefine-in-handler') def visit_excepthandler(self, node): for name in get_all_elements(node.name): clobbering, args = clobber_in_except(name) if clobbering: - self.add_message('W0623', args=args, node=name) + self.add_message('redefine-in-handler', args=args, node=name) def visit_assname(self, node): if isinstance(node.ass_type(), astroid.AugAssign): @@ -501,6 +534,8 @@ def visit_name(self, node): # the name has already been consumed, only check it's not a loop # variable used outside the loop if name in consumed: + defnode = assign_parent(consumed[name][0]) + self._check_late_binding_closure(node, defnode, scope_type) self._loopvar_name(node, name) break # mark the name as consumed if it's defined in this scope @@ -512,6 +547,7 @@ def visit_name(self, node): # checks for use before assignment defnode = assign_parent(to_consume[name][0]) if defnode is not None: + self._check_late_binding_closure(node, defnode, 
scope_type) defstmt = defnode.statement() defframe = defstmt.frame() maybee0601 = True @@ -539,14 +575,14 @@ def visit_name(self, node): and not are_exclusive(stmt, defstmt, ('NameError', 'Exception', 'BaseException'))): if defstmt is stmt and isinstance(node, (astroid.DelName, astroid.AssName)): - self.add_message('E0602', args=name, node=node) + self.add_message('undefined-variable', args=name, node=node) elif self._to_consume[-1][-1] != 'lambda': # E0601 may *not* occurs in lambda scope - self.add_message('E0601', args=name, node=node) - if not isinstance(node, astroid.AssName): # Aug AssName - del to_consume[name] - else: + self.add_message('used-before-assignment', args=name, node=node) + if isinstance(node, astroid.AssName): # Aug AssName del consumed[name] + else: + del to_consume[name] # check it's not a loop variable used outside the loop self._loopvar_name(node, name) break @@ -555,9 +591,9 @@ def visit_name(self, node): # undefined name ! if not (name in astroid.Module.scope_attrs or is_builtin(name) or name in self.config.additional_builtins): - self.add_message('E0602', args=name, node=node) + self.add_message('undefined-variable', args=name, node=node) - @check_messages('E0611') + @check_messages('no-name-in-module') def visit_import(self, node): """check modules attribute accesses""" for name, _ in node.names: @@ -568,7 +604,7 @@ def visit_import(self, node): continue self._check_module_attrs(node, module, parts[1:]) - @check_messages('E0611') + @check_messages('no-name-in-module') def visit_from(self, node): """check modules attribute accesses""" name_parts = node.modname.split('.') @@ -597,10 +633,6 @@ def visit_assign(self, node): return targets = node.targets[0].itered() - if any(not isinstance(target_node, astroid.AssName) - for target_node in targets): - return - try: for infered in node.value.infer(): self._check_unpacking(infered, node, targets) @@ -653,7 +685,7 @@ def _check_module_attrs(self, node, module, module_names): if module is 
astroid.YES: return None except astroid.NotFoundError: - self.add_message('E0611', args=(name, module.name), node=node) + self.add_message('no-name-in-module', args=(name, module.name), node=node) return None except astroid.InferenceError: return None @@ -661,7 +693,7 @@ def _check_module_attrs(self, node, module, module_names): # FIXME: other message if name is not the latest part of # module_names ? modname = module and module.name or '__dict__' - self.add_message('E0611', node=node, + self.add_message('no-name-in-module', node=node, args=('.'.join(module_names), modname)) return None if isinstance(module, astroid.Module): @@ -684,6 +716,22 @@ def leave_listcomp(self, _): # do not check for not used locals here self._to_consume.pop() + def leave_module(self, node): + """ Update consumption analysis variable + for metaclasses. + """ + for klass in node.nodes_of_class(astroid.Class): + if klass._metaclass: + metaclass = klass.metaclass() + module_locals = self._to_consume[0][0] + + if isinstance(klass._metaclass, astroid.Name): + module_locals.pop(klass._metaclass.name, None) + if metaclass: + # if it uses a `metaclass=module.Class` + module_locals.pop(metaclass.root().name, None) + super(VariablesChecker3k, self).leave_module(node) + if sys.version_info >= (3, 0): VariablesChecker = VariablesChecker3k diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py index cf30c182..992c2934 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py @@ -97,6 +97,8 @@ def find_pylintrc(): pylintrc = ".pylintrc" else: pylintrc = join(user_home, '.pylintrc') + if not isfile(pylintrc): + pylintrc = join(user_home, '.config', 'pylintrc') if not isfile(pylintrc): if isfile('/etc/pylintrc'): pylintrc = '/etc/pylintrc' diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py index 
7d88d8e4..529fbd44 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py @@ -29,6 +29,7 @@ # import this first to avoid builtin namespace pollution from pylint.checkers import utils +import functools import sys import os import tokenize @@ -38,7 +39,7 @@ from logilab.common.optik_ext import check_csv from logilab.common.modutils import load_module_from_name, get_module_part from logilab.common.interface import implements -from logilab.common.textutils import splitstrip +from logilab.common.textutils import splitstrip, unquote from logilab.common.ureports import Table, Text, Section from logilab.common.__pkginfo__ import version as common_version @@ -89,10 +90,6 @@ def _get_python_path(filepath): 'ignored-builtin-module', 'Used to indicate that the user asked to analyze a builtin ' 'module which has been skipped.'), - 'F0004': ('unexpected inferred value %s', - 'unexpected-inferred-value', - 'Used to indicate that some value of an unexpected type has been ' - 'inferred.'), 'F0010': ('error while code parsing: %s', 'parse-error', 'Used when an exception occured while building the Astroid ' @@ -108,23 +105,17 @@ def _get_python_path(filepath): 'Used when an inline option is either badly formatted or can\'t ' 'be used inside modules.'), - 'I0011': ('Locally disabling %s', + 'I0011': ('Locally disabling %s (%s)', 'locally-disabled', 'Used when an inline option disables a message or a messages ' 'category.'), - 'I0012': ('Locally enabling %s', + 'I0012': ('Locally enabling %s (%s)', 'locally-enabled', 'Used when an inline option enables a message or a messages ' 'category.'), 'I0013': ('Ignoring entire file', 'file-ignored', 'Used to inform that the file will not be checked'), - 'I0014': ('Used deprecated directive "pylint:disable-all" or ' - '"pylint:disable=all"', - 'deprecated-disable-all', - 'You should preferably use "pylint:skip-file" as this directive ' - 'has a less confusing name. 
Do this only if you are sure that ' - 'all people running Pylint on your code have version >= 0.26'), 'I0020': ('Suppressed %s (from line %d)', 'suppressed-message', 'A message was triggered on a line, but suppressed explicitly ' @@ -135,11 +126,12 @@ def _get_python_path(filepath): 'useless-suppression', 'Reported when a message is explicitly disabled for a line or ' 'a block of code, but never triggered.'), - 'I0022': ('Deprecated pragma "pylint:disable-msg" or "pylint:enable-msg"', + 'I0022': ('Pragma "%s" is deprecated, use "%s" instead', 'deprecated-pragma', - 'You should preferably use "pylint:disable" or "pylint:enable" ' - 'instead of the deprecated suppression pragma style ' - '"pylint:disable-msg" or "pylint:enable-msg"'), + 'Some inline pylint options have been renamed or reworked, ' + 'only the most recent form should be used. ' + 'NOTE:skip-all is only available with pylint >= 0.26', + {'old_names': [('I0014', 'deprecated-disable-all')]}), 'E0001': ('%s', 'syntax-error', @@ -154,6 +146,13 @@ def _get_python_path(filepath): } +def _deprecated_option(shortname, opt_type): + def _warn_deprecated(option, optname, *args): + sys.stderr.write('Warning: option %s is deprecated and ignored.\n' % (optname,)) + return {'short': shortname, 'help': 'DEPRECATED', 'hide': True, + 'type': opt_type, 'action': 'callback', 'callback': _warn_deprecated} + + class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn, BaseTokenChecker): """lint Python modules using external checkers. @@ -271,7 +270,10 @@ def make_options(): 'This is a python new-style format string ' 'used to format the message information. 
' 'See doc for all details') - }), # msg-template + }), + + ('include-ids', _deprecated_option('i', 'yn')), + ('symbols', _deprecated_option('s', 'yn')), ) option_groups = ( @@ -389,7 +391,7 @@ def set_option(self, optname, value, action=None, optdict=None): value = check_csv(None, optname, value) if isinstance(value, (list, tuple)): for _id in value: - meth(_id) + meth(_id, ignore_unknown=True) else: meth(value) elif optname == 'output-format': @@ -453,25 +455,24 @@ def process_tokens(self, tokens): """process tokens from the current module to search for module/block level options """ - comment = tokenize.COMMENT - newline = tokenize.NEWLINE - for (tok_type, _, start, _, line) in tokens: - if tok_type not in (comment, newline): + for (tok_type, content, start, _, _) in tokens: + if tok_type != tokenize.COMMENT: continue - match = OPTION_RGX.search(line) + match = OPTION_RGX.search(content) if match is None: continue if match.group(1).strip() == "disable-all" or \ match.group(1).strip() == 'skip-file': if match.group(1).strip() == "disable-all": - self.add_message('I0014', line=start[0]) - self.add_message('I0013', line=start[0]) + self.add_message('deprecated-pragma', line=start[0], + args=('disable-all', 'skip-file')) + self.add_message('file-ignored', line=start[0]) self._ignore_file = True return try: opt, value = match.group(1).split('=', 1) except ValueError: - self.add_message('I0010', args=match.group(1).strip(), + self.add_message('bad-inline-option', args=match.group(1).strip(), line=start[0]) continue opt = opt.strip() @@ -481,19 +482,19 @@ def process_tokens(self, tokens): except KeyError: meth = self._bw_options_methods[opt] # found a "(dis|en)able-msg" pragma deprecated suppresssion - self.add_message('deprecated-pragma', line=start[0]) + self.add_message('deprecated-pragma', line=start[0], args=(opt, opt.replace('-msg', ''))) for msgid in splitstrip(value): try: if (opt, msgid) == ('disable', 'all'): - self.add_message('I0014', line=start[0]) - 
self.add_message('I0013', line=start[0]) + self.add_message('deprecated-pragma', line=start[0], args=('disable=all', 'skip-file')) + self.add_message('file-ignored', line=start[0]) self._ignore_file = True return meth(msgid, 'module', start[0]) except UnknownMessage: - self.add_message('E0012', args=msgid, line=start[0]) + self.add_message('bad-option-value', args=msgid, line=start[0]) else: - self.add_message('E0011', args=opt, line=start[0]) + self.add_message('unrecognized-inline-option', args=opt, line=start[0]) def collect_block_lines(self, node, msg_state): """walk ast to collect block level options line numbers""" @@ -569,14 +570,13 @@ def prepare_checkers(self): if (messages or any(self.report_is_enabled(r[0]) for r in checker.reports)): neededcheckers.append(checker) - checker.active_msgs = messages return neededcheckers def should_analyze_file(self, modname, path): # pylint: disable=unused-argument """Returns whether or not a module should be checked. - This implementation returns True for all inputs, indicating that all - files should be linted. + This implementation returns True for all python source file, indicating + that all files should be linted. Subclasses may override this method to indicate that modules satisfying certain conditions should not be linted. @@ -586,7 +586,7 @@ def should_analyze_file(self, modname, path): # pylint: disable=unused-argument :returns: True if the module should be checked. 
:rtype: bool """ - return True + return path.endswith('.py') def check(self, files_or_modules): """main checking entry: check a list of files or modules from their @@ -607,7 +607,7 @@ def check(self, files_or_modules): # build ast and check modules or packages for descr in self.expand_files(files_or_modules): modname, filepath = descr['name'], descr['path'] - if not self.should_analyze_file(modname, filepath): + if not descr['isarg'] and not self.should_analyze_file(modname, filepath): continue if self.config.files_output: reportfile = 'pylint_%s.%s' % (modname, self.reporter.extension) @@ -622,7 +622,7 @@ def check(self, files_or_modules): self._ignore_file = False # fix the current file (if the source file was not available or # if it's actually a c extension) - self.current_file = astroid.file + self.current_file = astroid.file # pylint: disable=maybe-no-member self.check_astroid_module(astroid, walker, rawcheckers, tokencheckers) self._add_suppression_messages() # notify global end @@ -640,7 +640,7 @@ def expand_files(self, modules): message = modname = error["mod"] key = error["key"] self.set_current_module(modname) - if key == "F0001": + if key == "fatal": message = str(error["ex"]).replace(os.getcwd() + os.sep, '') self.add_message(key, args=message) return result @@ -671,13 +671,13 @@ def get_ast(self, filepath, modname): try: return MANAGER.ast_from_file(filepath, modname, source=True) except SyntaxError, ex: - self.add_message('E0001', line=ex.lineno, args=ex.msg) + self.add_message('syntax-error', line=ex.lineno, args=ex.msg) except AstroidBuildingException, ex: - self.add_message('F0010', args=ex) + self.add_message('parse-error', args=ex) except Exception, ex: import traceback traceback.print_exc() - self.add_message('F0002', args=(ex.__class__, ex)) + self.add_message('astroid-error', args=(ex.__class__, ex)) def check_astroid_module(self, astroid, walker, rawcheckers, tokencheckers): """check a module from its astroid representation, real work""" @@ 
-685,11 +685,11 @@ def check_astroid_module(self, astroid, walker, rawcheckers, tokencheckers): try: tokens = tokenize_module(astroid) except tokenize.TokenError, ex: - self.add_message('E0001', line=ex.args[1][0], args=ex.args[0]) + self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0]) return if not astroid.pure_python: - self.add_message('I0001', args=astroid.name) + self.add_message('raw-checker-failed', args=astroid.name) else: #assert astroid.file.endswith('.py') # invoke ITokenChecker interface on self to fetch module/block @@ -753,12 +753,12 @@ def _add_suppression_messages(self): for warning, lines in self._raw_module_msgs_state.iteritems(): for line, enable in lines.iteritems(): if not enable and (warning, line) not in self._ignored_msgs: - self.add_message('I0021', line, None, + self.add_message('useless-suppression', line, None, (self.get_msg_display_string(warning),)) # don't use iteritems here, _ignored_msgs may be modified by add_message for (warning, from_), lines in self._ignored_msgs.items(): for line in lines: - self.add_message('I0020', line, None, + self.add_message('suppressed-message', line, None, (self.get_msg_display_string(warning), from_)) def report_evaluation(self, sect, stats, previous_stats): @@ -907,7 +907,7 @@ def __init__(self, args, reporter=None, exit=True): try: preprocess_options(args, { # option: (callback, takearg) - 'init-hooks': (cb_init_hook, True), + 'init-hook': (cb_init_hook, True), 'rcfile': (self.cb_set_rcfile, True), 'load-plugins': (self.cb_add_plugins, True), }) @@ -1005,12 +1005,16 @@ def __init__(self, args, reporter=None, exit=True): 'been issued by analysing pylint output status code\n', level=1) # read configuration - linter.disable('W0704') - linter.disable('I0020') - linter.disable('I0021') + linter.disable('pointless-except') + linter.disable('suppressed-message') + linter.disable('useless-suppression') linter.read_config_file() - # is there some additional plugins in the file configuration, 
in config_parser = linter.cfgfile_parser + # run init hook, if present, before loading plugins + if config_parser.has_option('MASTER', 'init-hook'): + cb_init_hook('init-hook', + unquote(config_parser.get('MASTER', 'init-hook'))) + # is there some additional plugins in the file configuration, in if config_parser.has_option('MASTER', 'load-plugins'): plugins = splitstrip(config_parser.get('MASTER', 'load-plugins')) linter.load_plugin_modules(plugins) diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py index ff579055..c6a73713 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/utils.py @@ -239,7 +239,6 @@ def register_messages(self, checker): chkid = msg.msgid[1:3] if not msg.may_be_emitted(): self._msgs_state[msg.msgid] = False - continue self._messages[msg.symbol] = msg self._alternative_names[msg.msgid] = msg for old_id, old_symbol in msg.old_names: @@ -247,7 +246,7 @@ def register_messages(self, checker): self._alternative_names[old_symbol] = msg self._msgs_by_category.setdefault(msg.msgid[0], []).append(msg.msgid) - def disable(self, msgid, scope='package', line=None): + def disable(self, msgid, scope='package', line=None, ignore_unknown=False): """don't output message of the given id""" assert scope in ('package', 'module') # handle disable=all by disabling all categories @@ -272,16 +271,24 @@ def disable(self, msgid, scope='package', line=None): if msgid.lower().startswith('rp'): self.disable_report(msgid) return - # msgid is a symbolic or numeric msgid. - msg = self.check_message_id(msgid) + + try: + # msgid is a symbolic or numeric msgid. 
+ msg = self.check_message_id(msgid) + except UnknownMessage: + if ignore_unknown: + return + raise + if scope == 'module': assert line > 0 try: self._module_msgs_state[msg.msgid][line] = False except KeyError: self._module_msgs_state[msg.msgid] = {line: False} - if msgid != 'I0011': - self.add_message('I0011', line=line, args=msg.msgid) + if msg.symbol != 'locally-disabled': + self.add_message('locally-disabled', line=line, + args=(msg.symbol, msg.msgid)) else: msgs = self._msgs_state @@ -290,7 +297,7 @@ def disable(self, msgid, scope='package', line=None): self.config.disable_msg = [mid for mid, val in msgs.iteritems() if not val] - def enable(self, msgid, scope='package', line=None): + def enable(self, msgid, scope='package', line=None, ignore_unknown=False): """reenable message of the given id""" assert scope in ('package', 'module') catid = category_id(msgid) @@ -309,15 +316,22 @@ def enable(self, msgid, scope='package', line=None): if msgid.lower().startswith('rp'): self.enable_report(msgid) return - # msgid is a symbolic or numeric msgid. - msg = self.check_message_id(msgid) + + try: + # msgid is a symbolic or numeric msgid. + msg = self.check_message_id(msgid) + except UnknownMessage: + if ignore_unknown: + return + raise + if scope == 'module': assert line > 0 try: self._module_msgs_state[msg.msgid][line] = True except KeyError: self._module_msgs_state[msg.msgid] = {line: True} - self.add_message('I0012', line=line, args=msg.msgid) + self.add_message('locally-enabled', line=line, args=(msg.symbol, msg.msgid)) else: msgs = self._msgs_state msgs[msg.msgid] = True @@ -400,6 +414,8 @@ def add_message(self, msg_descr, line=None, node=None, args=None): """ msg_info = self.check_message_id(msg_descr) msgid = msg_info.msgid + # backward compatibility, message may not have a symbol + symbol = msg_info.symbol or msgid # Fatal messages and reports are special, the node/scope distinction # does not apply to them. 
if msgid[0] not in _SCOPE_EXEMPT: @@ -427,9 +443,9 @@ def add_message(self, msg_descr, line=None, node=None, args=None): self.stats[msg_cat] += 1 self.stats['by_module'][self.current_name][msg_cat] += 1 try: - self.stats['by_msg'][msg_info.symbol] += 1 + self.stats['by_msg'][symbol] += 1 except KeyError: - self.stats['by_msg'][msg_info.symbol] = 1 + self.stats['by_msg'][symbol] = 1 # expand message ? msg = msg_info.msg if args: @@ -521,6 +537,8 @@ def list_messages(self): """output full messages list documentation in ReST format""" msgs = sorted(self._messages.itervalues(), key=lambda msg: msg.msgid) for msg in msgs: + if not msg.may_be_emitted(): + continue print msg.format_help(checkerref=False) print @@ -612,15 +630,15 @@ def expand_modules(files_or_modules, black_list): try: filepath = file_from_modpath(modname.split('.')) if filepath is None: - errors.append({'key' : 'F0003', 'mod': modname}) + errors.append({'key' : 'ignored-builtin-module', 'mod': modname}) continue except (ImportError, SyntaxError), ex: # FIXME p3k : the SyntaxError is a Python bug and should be # removed as soon as possible http://bugs.python.org/issue10588 - errors.append({'key': 'F0001', 'mod': modname, 'ex': ex}) + errors.append({'key': 'fatal', 'mod': modname, 'ex': ex}) continue filepath = normpath(filepath) - result.append({'path': filepath, 'name': modname, + result.append({'path': filepath, 'name': modname, 'isarg': True, 'basepath': filepath, 'basename': modname}) if not (modname.endswith('.__init__') or modname == '__init__') \ and '__init__.py' in filepath: @@ -629,6 +647,7 @@ def expand_modules(files_or_modules, black_list): continue submodname = '.'.join(modpath_from_file(subfilepath)) result.append({'path': subfilepath, 'name': submodname, + 'isarg': False, 'basepath': filepath, 'basename': modname}) return result, errors @@ -645,7 +664,6 @@ def __init__(self, linter): def _is_method_enabled(self, method): if not hasattr(method, 'checks_msgs'): return True - for msg_desc in 
method.checks_msgs: if self.linter.is_message_enabled(msg_desc): return True From 20d02881a07efc316164149722cf25bcc468b9db Mon Sep 17 00:00:00 2001 From: Kevin Kwon Date: Sat, 10 May 2014 21:57:31 +0900 Subject: [PATCH 092/428] fixed autoimport in windows, gVim is displaying --multiprocessing-fork due to the option changed previously. Think it should be reverted back to the previous version --- pymode/rope.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/rope.py b/pymode/rope.py index b0833ddb..7d50c7d4 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -362,7 +362,7 @@ def __init__(self, path, project_path): if os.path.exists("%s/__init__.py" % project_path): sys.path.append(project_path) - if self.options.get('autoimport'): + if self.options.get('autoimport') == '1': self.generate_autoimport_cache() env.debug('Context init', project_path) From d9699cd0963c29dd54ee2a120a8be26f1d981073 Mon Sep 17 00:00:00 2001 From: Jay Rainey Date: Thu, 22 May 2014 19:06:39 +0100 Subject: [PATCH 093/428] Remove unnecessary copyrights from README As references to these modules have previously been made (in the opening paragraph) there is no need to explicitly list their copyright. --- README.rst | 119 +++++------------------------------------------------ 1 file changed, 10 insertions(+), 109 deletions(-) diff --git a/README.rst b/README.rst index 497a8c45..3f2e050c 100644 --- a/README.rst +++ b/README.rst @@ -167,111 +167,6 @@ Copyright Copyright © 2013 Kirill Klenov (klen_) -Rope ------ -Copyright (C) 2006-2010 Ali Gholami Rudi - -Copyright (C) 2009-2010 Anton Gritsay - -https://pypi.python.org/pypi/rope - -https://pypi.python.org/pypi/ropemode - -http://rope.sourceforge.net/ropevim.html - - -Pylama ------- -Copyright (C) 2012-2013 Kirill Klenov - -https://pypi.python.org/pypi/pylama - -https://github.com/klen/pylama - - -Pylint ------- -Copyright (C) 2003-2011 LOGILAB S.A. (Paris, FRANCE). 
- -https://pypi.python.org/pypi/pylint - -https://bitbucket.org/logilab/pylint - -http://www.pylint.org/ - -http://www.logilab.fr/ - - -Pyflakes --------- - -Copyright (c) 2005 Divmod, Inc. - -https://pypi.python.org/pypi/pyflakes - -https://launchpad.net/pyflakes - -http://www.divmod.com/ - - -pep8 ----- -Copyright (C) 2006 Johann C. Rocholl - -https://pypi.python.org/pypi/pep8 - -http://github.com/jcrocholl/pep8 - -http://www.python.org/dev/peps/pep-0008/ - - -autopep8 --------- -Copyright (C) 2010-2011 Hideo Hattori - -http://github.com/hynek/vim-python-pep8-indent - - License ======= @@ -281,12 +176,18 @@ If you like this plugin, you can send me postcard :) My address is here: "Russia, 143401, Krasnogorsk, Shkolnaya 1-19" to "Kirill Klenov". **Thanks for support!** - .. _GNU lesser general public license: http://www.gnu.org/copyleft/lesser.html .. _klen: http://klen.github.com/ .. _pydoc: http://docs.python.org/library/pydoc.html .. _pathogen: https://github.com/tpope/vim-pathogen -.. _mccabe: http://en.wikipedia.org/wiki/Cyclomatic_complexity -.. _Rope: http://rope.sourceforge.net/ -.. _Pylama: https://github.com/klen/pylama +.. _rope: https://pypi.python.org/pypi/rope +.. _pylama: https://github.com/klen/pylama +.. _pylint: https://bitbucket.org/logilab/pylint +.. _pyflakes: https://pypi.python.org/pypi/pyflakes +.. _autopep8: https://github.com/hhatto/autopep8 +.. _pep257: http://github.com/GreenSteam/pep257 +.. _mccabe: https://github.com/flintwork/mccabe +.. _pythonvim: http://www.hlabs.spb.ru/vim/python.vim +.. _pep8: http://github.com/jcrocholl/pep8 +.. _pep8indent: http://github.com/hynek/vim-python-pep8-indent .. 
|logo| image:: https://raw.github.com/klen/python-mode/develop/logo.png From 4c2a413fbe166cf8a4ce0ddf6d02a778096bd201 Mon Sep 17 00:00:00 2001 From: Jay Rainey Date: Thu, 22 May 2014 19:07:46 +0100 Subject: [PATCH 094/428] Added name to AUTHORS file --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 92cdf73a..a287aabe 100644 --- a/AUTHORS +++ b/AUTHORS @@ -19,6 +19,7 @@ Contributors: * Florent Xicluna (florentx); * Fredrik Henrysson (fhenrysson); * Igor Guerrero (igorgue); +* Jay Rainey (jawrainey); * Jonathan McCall (Jonnymcc); * Kevin Deldycke (kdeldycke); * Lowe Thiderman (thiderman); From d2111cd3af072c81c927114ab7bc3352d5bc170d Mon Sep 17 00:00:00 2001 From: "Wu, Fan" Date: Wed, 28 May 2014 00:27:38 -0400 Subject: [PATCH 095/428] stdout will be set to vim.message, but in early release of vim 7.3, message object didn't have method flush. It will cause some problems in multiprocessing related code. Fixed of issue #426 --- pymode/rope.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pymode/rope.py b/pymode/rope.py index 7d50c7d4..4a1092dc 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -6,6 +6,7 @@ import re import site import sys +import StringIO from rope.base import project, libutils, exceptions, change, worder # noqa from rope.base.fscommands import FileSystemCommands # noqa @@ -398,9 +399,12 @@ def _update_cache(importer, modules=None): importer.generate_modules_cache(modules) importer.project.sync() + sys.stdout, stdout_ = StringIO.StringIO(), sys.stdout + sys.stderr, stderr_ = StringIO.StringIO(), sys.stderr process = multiprocessing.Process(target=_update_cache, args=( self.importer, modules)) process.start() + sys.stdout, sys.stderr = stdout_, stderr_ class ProgressHandler(object): From c8271bac6145b1a03a374e098705060d9f908d6a Mon Sep 17 00:00:00 2001 From: Jason Harvey Date: Thu, 29 May 2014 16:37:22 -0800 Subject: [PATCH 096/428] Replace literal special chars in mappings. 
Replaces \ with , which makes mappings work for those having cpoptions-=B. Also replaces | with to clean up the ugly double-escape. --- after/ftplugin/python.vim | 60 +++++++++++++++++++-------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/after/ftplugin/python.vim b/after/ftplugin/python.vim index e27cfc47..0dec7542 100644 --- a/after/ftplugin/python.vim +++ b/after/ftplugin/python.vim @@ -11,36 +11,36 @@ if g:pymode_motion finish endif - nnoremap ]] :call pymode#motion#move('^\(class\\|def\)\s', '') - nnoremap [[ :call pymode#motion#move('^\(class\\|def\)\s', 'b') - nnoremap ]C :call pymode#motion#move('^\(class\\|def\)\s', '') - nnoremap [C :call pymode#motion#move('^\(class\\|def\)\s', 'b') - nnoremap ]M :call pymode#motion#move('^\s*def\s', '') - nnoremap [M :call pymode#motion#move('^\s*def\s', 'b') - - onoremap ]] :call pymode#motion#move('^\(class\\|def\)\s', '') - onoremap [[ :call pymode#motion#move('^\(class\\|def\)\s', 'b') - onoremap ]C :call pymode#motion#move('^\(class\\|def\)\s', '') - onoremap [C :call pymode#motion#move('^\(class\\|def\)\s', 'b') - onoremap ]M :call pymode#motion#move('^\s*def\s', '') - onoremap [M :call pymode#motion#move('^\s*def\s', 'b') - - vnoremap ]] :call pymode#motion#vmove('^\(class\\|def\)\s', '') - vnoremap [[ :call pymode#motion#vmove('^\(class\\|def\)\s', 'b') - vnoremap ]M :call pymode#motion#vmove('^\s*def\s', '') - vnoremap [M :call pymode#motion#vmove('^\s*def\s', 'b') - - onoremap C :call pymode#motion#select('^\s*class\s', 0) - onoremap aC :call pymode#motion#select('^\s*class\s', 0) - onoremap iC :call pymode#motion#select('^\s*class\s', 1) - vnoremap aC :call pymode#motion#select('^\s*class\s', 0) - vnoremap iC :call pymode#motion#select('^\s*class\s', 1) - - onoremap M :call pymode#motion#select('^\s*def\s', 0) - onoremap aM :call pymode#motion#select('^\s*def\s', 0) - onoremap iM :call pymode#motion#select('^\s*def\s', 1) - vnoremap aM :call pymode#motion#select('^\s*def\s', 0) 
- vnoremap iM :call pymode#motion#select('^\s*def\s', 1) + nnoremap ]] :call pymode#motion#move('v^(classdef)s', '') + nnoremap [[ :call pymode#motion#move('v^(classdef)s', 'b') + nnoremap ]C :call pymode#motion#move('v^(classdef)s', '') + nnoremap [C :call pymode#motion#move('v^(classdef)s', 'b') + nnoremap ]M :call pymode#motion#move('^s*defs', '') + nnoremap [M :call pymode#motion#move('^s*defs', 'b') + + onoremap ]] :call pymode#motion#move('v^(classdef)s', '') + onoremap [[ :call pymode#motion#move('v^(classdef)s', 'b') + onoremap ]C :call pymode#motion#move('v^(classdef)s', '') + onoremap [C :call pymode#motion#move('v^(classdef)s', 'b') + onoremap ]M :call pymode#motion#move('^s*defs', '') + onoremap [M :call pymode#motion#move('^s*defs', 'b') + + vnoremap ]] :call pymode#motion#vmove('v^(classdef)s', '') + vnoremap [[ :call pymode#motion#vmove('v^(classdef)s', 'b') + vnoremap ]M :call pymode#motion#vmove('^s*defs', '') + vnoremap [M :call pymode#motion#vmove('^s*defs', 'b') + + onoremap C :call pymode#motion#select('^s*classs', 0) + onoremap aC :call pymode#motion#select('^s*classs', 0) + onoremap iC :call pymode#motion#select('^s*classs', 1) + vnoremap aC :call pymode#motion#select('^s*classs', 0) + vnoremap iC :call pymode#motion#select('^s*classs', 1) + + onoremap M :call pymode#motion#select('^s*defs', 0) + onoremap aM :call pymode#motion#select('^s*defs', 0) + onoremap iM :call pymode#motion#select('^s*defs', 1) + vnoremap aM :call pymode#motion#select('^s*defs', 0) + vnoremap iM :call pymode#motion#select('^s*defs', 1) endif From d1ee48c1a92d9d99a2e8919614ad6e18cce1dbac Mon Sep 17 00:00:00 2001 From: Ben Davis Date: Sun, 1 Jun 2014 15:45:53 -0500 Subject: [PATCH 097/428] Added options to give more control over rope paths * Ability to override rope project root and .ropeproject folder * Added path argument to `PymodeRopeNewProject` which skips prompt * Options added: 'pymode_rope_project_root', 'pymode_rope_ropefolder' --- Changelog.rst | 5 +++++ 
autoload/pymode/rope.vim | 2 +- doc/pymode.txt | 28 +++++++++++++++++++++++++--- ftplugin/python/pymode.vim | 2 +- plugin/pymode.vim | 6 ++++++ pymode/rope.py | 28 +++++++++++++++++++++------- 6 files changed, 59 insertions(+), 12 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 8d102795..d69ea7f5 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -4,6 +4,11 @@ Changelog * Get fold's expression symbol from &fillchars; * Fixed error when setting g:pymode_breakpoint_cmd (expobrain); * Fixed code running; + * Ability to override rope project root and .ropeproject folder + * Added path argument to `PymodeRopeNewProject` which skips prompt + + * Options added: + 'pymode_rope_project_root', 'pymode_rope_ropefolder' ## 2013-12-04 0.7.8b diff --git a/autoload/pymode/rope.vim b/autoload/pymode/rope.vim index a6170c33..a82a46d9 100644 --- a/autoload/pymode/rope.vim +++ b/autoload/pymode/rope.vim @@ -83,7 +83,7 @@ fun! pymode#rope#regenerate() "{{{ endfunction "}}} -fun! pymode#rope#new() "{{{ +fun! pymode#rope#new(...) "{{{ PymodePython rope.new() endfunction "}}} diff --git a/doc/pymode.txt b/doc/pymode.txt index 23c2710d..94e28782 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -354,7 +354,7 @@ Turn on the rope script *'g:pymode_rope'* .ropeproject Folder ~ *.ropeproject* -*:PymodeRopeNewProject* -- Open new Rope project in current working directory +*:PymodeRopeNewProject* [] -- Open new Rope project in the given path *:PymodeRopeRegenerate* -- Regenerate the project cache Rope uses a folder inside projects for holding project configuration and data. @@ -371,8 +371,9 @@ Currently it is used for things such as: * It can be used to save information about object inferences. * It can be used to save a global name cache, which is used for auto-import. -If `.ropeproject` is not found in the current directory, rope will look -recursively for it in parent folders. 
+By default, if `.ropeproject` is not found in the current directory, rope will +look recursively for it in parent folders. + Warning: If rope finds `.ropeproject` in a parent dir, it will use it with all its child directories, which may slow scanning down (because of many, possibly unrelated, files) @@ -382,6 +383,23 @@ Enable searching for |.ropeproject| in parent directories > let g:pymode_rope_lookup_project = 1 +You can also manually set the rope project directory. If not specified rope will +use the current directory. + *'g:pymode_rope_project_root'* +> + let g:pymode_rope_project_root = "" + + +The location of the `.ropeproject` folder may also be overridden if you wish to +keep it outside of your project root. The rope library treats this folder as a +project resource, so the path will always be relative to your proejct root (a +leading '/' will be ignored). You may use `'..'` path segments to place the +folder outside of your project root. + *'g:pymode_rope_ropefolder'* +> + let g:pymode_rope_ropefolder='.ropeproject' + + Show documentation for element under cursor ~ @@ -646,6 +664,10 @@ Solutions: - Set |'g:pymode_rope_lookup_project'| to 0 for prevent searching in parent dirs. +You may also set |'g:pymode_rope_project_root'| to manually specify the project +root path. + + Pylint check is very slow ------------------------- diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index 2c162e7b..d5fe8aba 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -177,7 +177,7 @@ if g:pymode_rope inoremap . .=pymode#rope#complete_on_dot() end - command! -buffer PymodeRopeNewProject call pymode#rope#new() + command! -buffer -nargs=? PymodeRopeNewProject call pymode#rope#new() command! -buffer PymodeRopeUndo call pymode#rope#undo() command! -buffer PymodeRopeRedo call pymode#rope#redo() command! 
-buffer PymodeRopeRenameModule call pymode#rope#rename_module() diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 9e08f33d..077cb351 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -151,6 +151,12 @@ call pymode#default('g:pymode_rope', 1) " System plugin variable call pymode#default('g:pymode_rope_current', '') +" Configurable rope project root +call pymode#default('g:pymode_rope_project_root', '') + +" Configurable rope project folder (always relative to project root) +call pymode#default('g:pymode_rope_ropefolder', '.ropeproject') + " If project hasnt been finded in current working directory, look at parents directory call pymode#default('g:pymode_rope_lookup_project', 1) diff --git a/pymode/rope.py b/pymode/rope.py index b0833ddb..a82f8f15 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -233,8 +233,16 @@ def regenerate(): def new(): """ Create a new project. """ - root = env.var('input("Enter project root: ", getcwd())') - prj = project.Project(projectroot=root) + root = None + if env.var('a:0') != '0': + root = env.var('a:1') + else: + default = env.var('g:pymode_rope_project_root') + if not default: + default = env.var('getcwd()') + root = env.var('input("Enter project root: ", "%s")' % default) + ropefolder = env.var('g:pymode_rope_ropefolder') + prj = project.Project(projectroot=root, ropefolder=ropefolder) prj.close() env.message("Project is opened: %s" % root) @@ -291,12 +299,18 @@ def get_ctx(*args, **kwargs): if resources.get(path): return resources.get(path) - project_path = env.curdir - env.debug('Look ctx', project_path) - if env.var('g:pymode_rope_lookup_project', True): - project_path = look_ropeproject(project_path) + project_path = env.var('g:pymode_rope_project_root') + if project_path: + project_path = env.curdir + env.debug('Look ctx', project_path) + if env.var('g:pymode_rope_lookup_project', True): + project_path = look_ropeproject(project_path) - ctx = projects.get(project_path) + if not os.path.exists(project_path): + 
env.error("Rope project root not exist: %s" % project_path) + ctx = None + else: + ctx = projects.get(project_path) if not ctx: projects[project_path] = ctx = cls(path, project_path) resources[path] = ctx From d610ccd38061ea1406c736726991aec2227be82c Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 7 Jun 2014 13:07:07 +0400 Subject: [PATCH 098/428] Update pylama to version 3.3.2 --- pymode/__init__.py | 2 - pymode/async.py | 1 + pymode/environment.py | 15 +-- pymode/libs/pylama/__init__.py | 2 +- pymode/libs/pylama/config.py | 3 +- pymode/libs/pylama/core.py | 102 ++++++++++++-------- pymode/libs/pylama/errors.py | 63 ++++++++++++ pymode/libs/pylama/hook.py | 1 + pymode/libs/pylama/lint/pylama_pep8/pep8.py | 61 +++++++----- pymode/libs/pylama/main.py | 3 +- pymode/libs/pylama/tasks.py | 9 +- pymode/lint.py | 7 +- pymode/rope.py | 28 +++--- pymode/utils.py | 2 - pymode/virtualenv.py | 1 - 15 files changed, 192 insertions(+), 108 deletions(-) create mode 100644 pymode/libs/pylama/errors.py diff --git a/pymode/__init__.py b/pymode/__init__.py index 857bac60..602bd95f 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -8,7 +8,6 @@ def auto(): """ Fix PEP8 erorrs in current buffer. """ - from .autopep8 import fix_file class Options(object): @@ -30,7 +29,6 @@ class Options(object): def get_documentation(): """ Search documentation and append to current buffer. """ - try: from StringIO import StringIO except ImportError: diff --git a/pymode/async.py b/pymode/async.py index d40ba218..75f1ebdf 100644 --- a/pymode/async.py +++ b/pymode/async.py @@ -1,4 +1,5 @@ """ Python-mode async support. """ + try: from Queue import Queue except ImportError: diff --git a/pymode/environment.py b/pymode/environment.py index 247d5700..69d97ffb 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -17,6 +17,7 @@ class VimPymodeEnviroment(object): prefix = '[Pymode]' def __init__(self): + """ Init VIM environment. 
""" self.current = vim.current self.options = dict(encoding=vim.eval('&enc')) self.options['debug'] = self.var('g:pymode_debug', True) @@ -24,13 +25,11 @@ def __init__(self): @property def curdir(self): """ Return current working directory. """ - return self.var('getcwd()') @property def curbuf(self): """ Return current buffer. """ - return self.current.buffer @property @@ -45,7 +44,6 @@ def cursor(self): @property def source(self): """ Return source of current buffer. """ - return "\n".join(self.lines) @property @@ -66,7 +64,6 @@ def var(self, name, to_bool=False): :return vimobj: """ - value = vim.eval(name) if to_bool: @@ -82,7 +79,6 @@ def message(self, msg, history=False): :return: :None """ - if history: return vim.command('echom "%s"' % str(msg)) @@ -149,16 +145,14 @@ def error(self, msg): def debug(self, msg, *args): """ Print debug information. """ - if self.options.get('debug'): print("%s %s [%s]" % ( int(time.time()), msg, ', '.join([str(a) for a in args]))) def stop(self, value=None): """ Break Vim function. """ - cmd = 'return' - if not value is None: + if value is not None: cmd += ' ' + self.prepare_value(value) vim.command(cmd) @@ -168,7 +162,6 @@ def catch_exceptions(self, func): :return func: """ - def _wrapper(*args, **kwargs): try: return func(*args, **kwargs) @@ -181,7 +174,6 @@ def _wrapper(*args, **kwargs): def run(self, name, *args): """ Run vim function. """ - vim.command('call %s(%s)' % (name, ", ".join([ self.prepare_value(a) for a in args ]))) @@ -198,7 +190,6 @@ def prepare_value(self, value, dumps=True): :return unicode string: """ - if dumps: value = json.dumps(value) @@ -229,12 +220,10 @@ def get_offset_params(self, cursor=None, base=""): def goto_line(self, line): """ Go to line. """ - vim.command('normal %sggzz' % line) def goto_file(self, path, cmd='e', force=False): """ Function description. 
""" - if force or os.path.abspath(path) != self.curbuf.name: self.debug('read', path) vim.command("%s %s" % (cmd, path)) diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index 2ba54bd4..c4a116f4 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -__version__ = "3.2.0" +__version__ = "3.3.2" __project__ = "pylama" __author__ = "Kirill Klenov " __license__ = "GNU LGPL" diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index 5ba7e5e7..7f09e7ec 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -33,7 +33,8 @@ def __init__(self, value=None): def __str__(self): return str(self.value) - __repr__ = lambda s: "<_Default [%s]>" % s.value + def __repr__(self): + return "<_Default [%s]>" % self.value def split_csp_str(s): diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 88bbc346..8c2841b6 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -6,9 +6,11 @@ import re import logging +from collections import defaultdict from .config import process_value, LOGGER from .lint.extensions import LINTERS +from .errors import DUPLICATES, Error #: The skip pattern @@ -20,18 +22,24 @@ re.I | re.M) -def run(path, code=None, options=None): +def run(path='', code=None, options=None): """ Run a code checkers with given params. 
:return errors: list of dictionaries with error's information """ errors = [] - params = dict(ignore=options.ignore, select=options.select) fileconfig = dict() - for mask in options.file_params: - if mask.match(path): - fileconfig.update(options.file_params[mask]) + params = dict() + linters = LINTERS + linter_params = dict() + + if options: + linters = options.linters + linter_params = options.linter_params + for mask in options.file_params: + if mask.match(path): + fileconfig.update(options.file_params[mask]) try: with CodeContext(code, path) as ctx: @@ -41,51 +49,43 @@ def run(path, code=None, options=None): if params.get('skip'): return errors - for item in options.linters: + for item in linters: if not isinstance(item, tuple): item = (item, LINTERS.get(item)) - name, linter = item + lname, linter = item - if not linter or not linter.allow(path): + if not linter or path and not linter.allow(path): continue - LOGGER.info("Run %s", name) - meta = options.linter_params.get(name, dict()) - result = linter.run(path, code=code, **meta) - for e in result: - e['linter'] = name - e['col'] = e.get('col') or 0 - e['lnum'] = e.get('lnum') or 0 - e['type'] = e.get('type') or 'E' - e['text'] = "%s [%s]" % ( - e.get('text', '').strip().split('\n')[0], name) - e['filename'] = path or '' - errors.append(e) + LOGGER.info("Run %s", lname) + meta = linter_params.get(lname, dict()) + errors += [Error(filename=path, linter=lname, **e) + for e in linter.run(path, code=code, **meta)] except IOError as e: LOGGER.debug("IOError %s", e) - errors.append(dict( - lnum=0, type='E', col=0, text=str(e), filename=path or '')) + errors.append(Error(text=str(e), filename=path, linter=lname)) except SyntaxError as e: LOGGER.debug("SyntaxError %s", e) - errors.append(dict( - lnum=e.lineno or 0, type='E', col=e.offset or 0, - text=e.args[0] + ' [%s]' % name, filename=path or '' - )) + errors.append( + Error(linter=lname, lnum=e.lineno, col=e.offset, text=e.args[0], + filename=path)) except Exception 
as e: import traceback LOGGER.info(traceback.format_exc()) - errors = [er for er in errors if filter_errors(er, **params)] + errors = filter_errors(errors, **params) + + errors = list(remove_duplicates(errors)) if code and errors: errors = filter_skiplines(code, errors) - return sorted(errors, key=lambda x: x['lnum']) + return sorted(errors, key=lambda e: e.lnum) def parse_modeline(code): @@ -107,7 +107,10 @@ def prepare_params(modeline, fileconfig, options): :return dict: """ - params = dict(ignore=options.ignore, select=options.select, skip=False) + params = dict(skip=False, ignore=[], select=[]) + if options: + params['ignore'] = options.ignore + params['select'] = options.select for config in filter(None, [modeline, fileconfig]): for key in ('ignore', 'select'): @@ -120,23 +123,26 @@ def prepare_params(modeline, fileconfig, options): return params -def filter_errors(e, select=None, ignore=None, **params): +def filter_errors(errors, select=None, ignore=None, **params): """ Filter a erros by select and ignore options. :return bool: """ - if select: - for s in select: - if e['text'].startswith(s): - return True - - if ignore: - for s in ignore: - if e['text'].startswith(s): - return False + select = select or [] + ignore = ignore or [] - return True + for e in errors: + for s in select: + if e.number.startswith(s): + yield e + break + else: + for s in ignore: + if e.number.startswith(s): + break + else: + yield e def filter_skiplines(code, errors): @@ -148,18 +154,30 @@ def filter_skiplines(code, errors): if not errors: return errors - enums = set(er['lnum'] for er in errors) + enums = set(er.lnum for er in errors) removed = set([ num for num, l in enumerate(code.split('\n'), 1) if num in enums and SKIP_PATTERN(l) ]) if removed: - errors = [er for er in errors if not er['lnum'] in removed] + errors = [er for er in errors if er.lnum not in removed] return errors +def remove_duplicates(errors): + """ Remove same errors from others linters. 
""" + passed = defaultdict(list) + for error in errors: + key = error.linter, error.number + if key in DUPLICATES: + if key in passed[error.lnum]: + continue + passed[error.lnum] = DUPLICATES[key] + yield error + + class CodeContext(object): """ Read file if code is None. """ diff --git a/pymode/libs/pylama/errors.py b/pymode/libs/pylama/errors.py new file mode 100644 index 00000000..5e8aa026 --- /dev/null +++ b/pymode/libs/pylama/errors.py @@ -0,0 +1,63 @@ +""" Dont duplicate errors same type. """ + +DUPLICATES = ( + + # multiple statements on one line + [('pep8', 'E701'), ('pylint', 'C0321')], + + # missing whitespace around operator + [('pep8', 'E225'), ('pylint', 'C0326')], + + # unused variable + [('pylint', 'W0612'), ('pyflakes', 'W0612')], + + # undefined variable + [('pylint', 'E0602'), ('pyflakes', 'E0602')], + + # unused import + [('pylint', 'W0611'), ('pyflakes', 'W0611')], + + # unexpected spaces + [('pylint', 'C0326'), ('pep8', 'E251')], + + # long lines + [('pylint', 'C0301'), ('pep8', 'E501')], + + # whitespace before '(' + [('pylint', 'C0326'), ('pep8', 'E211')], + + # statement ends with a semicolon + [('pylint', 'W0301'), ('pep8', 'E703')], + + # multiple statements on one line + [('pylint', 'C0321'), ('pep8', 'E702')], + +) + +DUPLICATES = dict((key, values) for values in DUPLICATES for key in values) + + +class Error(object): + + """ Store error information. """ + + def __init__(self, linter="", col=1, lnum=1, type="E", + text="unknown error", filename="", **kwargs): + """ Init error information with default values. 
""" + text = ' '.join(str(text).strip().split('\n')) + if linter: + text = "%s [%s]" % (text, linter) + number = text.split(' ', 1)[0] + self._info = dict(linter=linter, col=col, lnum=lnum, type=type, + text=text, filename=filename, number=number) + + def __getattr__(self, name): + return self._info[name] + + def __getitem__(self, name): + return self._info[name] + + def __repr__(self): + return "" % (self.number, self.linter) + +# pylama:ignore=W0622,D diff --git a/pymode/libs/pylama/hook.py b/pymode/libs/pylama/hook.py index cd1961ec..0dc34069 100644 --- a/pymode/libs/pylama/hook.py +++ b/pymode/libs/pylama/hook.py @@ -1,4 +1,5 @@ """ SCM hooks. Integration with git and mercurial. """ + from __future__ import absolute_import import sys diff --git a/pymode/libs/pylama/lint/pylama_pep8/pep8.py b/pymode/libs/pylama/lint/pylama_pep8/pep8.py index bc911374..b31a9781 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/pep8.py +++ b/pymode/libs/pylama/lint/pylama_pep8/pep8.py @@ -46,7 +46,7 @@ """ from __future__ import with_statement -__version__ = '1.5.7a0' +__version__ = '1.6.0a0' import os import sys @@ -64,7 +64,7 @@ from ConfigParser import RawConfigParser DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__' -DEFAULT_IGNORE = 'E123,E226,E24' +DEFAULT_IGNORE = 'E123,E226,E24,E704' if sys.platform == 'win32': DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8') else: @@ -353,20 +353,25 @@ def indentation(logical_line, previous_logical, indent_char, Okay: a = 1 Okay: if a == 0:\n a = 1 E111: a = 1 + E114: # a = 1 Okay: for item in items:\n pass E112: for item in items:\npass + E115: for item in items:\n# Hi\n pass Okay: a = 1\nb = 2 E113: a = 1\n b = 2 + E116: a = 1\n # b = 2 """ - if indent_char == ' ' and indent_level % 4: - yield 0, "E111 indentation is not a multiple of four" + c = 0 if logical_line else 3 + tmpl = "E11%d %s" if logical_line else "E11%d %s (comment)" + if indent_level % 4: + yield 0, tmpl % (1 + c, "indentation is not a multiple of four") indent_expect 
= previous_logical.endswith(':') if indent_expect and indent_level <= previous_indent_level: - yield 0, "E112 expected an indented block" - if indent_level > previous_indent_level and not indent_expect: - yield 0, "E113 unexpected indentation" + yield 0, tmpl % (2 + c, "expected an indented block") + elif not indent_expect and indent_level > previous_indent_level: + yield 0, tmpl % (3 + c, "unexpected indentation") def continued_indentation(logical_line, tokens, indent_level, hang_closing, @@ -787,6 +792,7 @@ def whitespace_before_comment(logical_line, tokens): E262: x = x + 1 #Increment x E262: x = x + 1 # Increment x E265: #Block comment + E266: ### Block comment """ prev_end = (0, 0) for token_type, text, start, end, line in tokens: @@ -797,13 +803,15 @@ def whitespace_before_comment(logical_line, tokens): yield (prev_end, "E261 at least two spaces before inline comment") symbol, sp, comment = text.partition(' ') - bad_prefix = symbol not in ('#', '#:') + bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#') if inline_comment: - if bad_prefix or comment[:1].isspace(): + if bad_prefix or comment[:1] in WHITESPACE: yield start, "E262 inline comment should start with '# '" - elif bad_prefix: - if text.rstrip('#') and (start[0] > 1 or symbol[1] != '!'): + elif bad_prefix and (bad_prefix != '!' or start[0] > 1): + if bad_prefix != '#': yield start, "E265 block comment should start with '# '" + elif comment: + yield start, "E266 too many leading '#' for block comment" elif token_type != tokenize.NL: prev_end = end @@ -834,6 +842,9 @@ def compound_statements(logical_line): on the same line, never do this for multi-clause statements. Also avoid folding such long lines! + Always use a def statement instead of an assignment statement that + binds a lambda expression directly to a name. 
+ Okay: if foo == 'blah':\n do_blah_thing() Okay: do_one() Okay: do_two() @@ -847,20 +858,26 @@ def compound_statements(logical_line): E701: try: something() E701: finally: cleanup() E701: if foo == 'blah': one(); two(); three() - E702: do_one(); do_two(); do_three() E703: do_four(); # useless semicolon + E704: def f(x): return 2*x + E731: f = lambda x: 2*x """ line = logical_line last_char = len(line) - 1 found = line.find(':') while -1 < found < last_char: before = line[:found] - if (before.count('{') <= before.count('}') and # {'a': 1} (dict) - before.count('[') <= before.count(']') and # [1:2] (slice) - before.count('(') <= before.count(')') and # (Python 3 annotation) - not LAMBDA_REGEX.search(before)): # lambda x: x - yield found, "E701 multiple statements on one line (colon)" + if ((before.count('{') <= before.count('}') and # {'a': 1} (dict) + before.count('[') <= before.count(']') and # [1:2] (slice) + before.count('(') <= before.count(')'))): # (annotation) + if LAMBDA_REGEX.search(before): + yield 0, "E731 do not assign a lambda expression, use a def" + break + if before.startswith('def '): + yield 0, "E704 multiple statements on one line (def)" + else: + yield found, "E701 multiple statements on one line (colon)" found = line.find(':', found + 1) found = line.find(';') while -1 < found: @@ -1037,7 +1054,7 @@ def python_3000_backticks(logical_line): # Python 2: implicit encoding. 
def readlines(filename): """Read the source code.""" - with open(filename) as f: + with open(filename, 'rU') as f: return f.readlines() isidentifier = re.compile(r'[a-zA-Z_]\w*').match stdin_get_value = sys.stdin.read @@ -1367,6 +1384,8 @@ def generate_tokens(self): tokengen = tokenize.generate_tokens(self.readline) try: for token in tokengen: + if token[2][0] > self.total_lines: + return self.maybe_check_physical(token) yield token except (SyntaxError, tokenize.TokenError): @@ -1449,10 +1468,8 @@ def check_all(self, expected=None, line_offset=0): token[3] = (token[2][0], token[2][1] + len(token[1])) self.tokens = [tuple(token)] self.check_logical() - if len(self.tokens) > 1 and (token_type == tokenize.ENDMARKER and - self.tokens[-2][0] not in SKIP_TOKENS): - self.tokens.pop() - self.check_physical(self.tokens[-1][4]) + if self.tokens: + self.check_physical(self.lines[-1]) self.check_logical() return self.report.get_file_results() diff --git a/pymode/libs/pylama/main.py b/pymode/libs/pylama/main.py index a52d2255..6a0a3268 100644 --- a/pymode/libs/pylama/main.py +++ b/pymode/libs/pylama/main.py @@ -1,4 +1,5 @@ """ Pylama's shell support. """ + from __future__ import absolute_import, with_statement import sys @@ -72,7 +73,7 @@ def check_files(paths, options, rootpath=None, error=True): errors = async_check_files(work_paths, options, rootpath=rootpath) for er in errors: - LOGGER.warning(pattern, er) + LOGGER.warning(pattern, er._info) if error: sys.exit(int(bool(errors))) diff --git a/pymode/libs/pylama/tasks.py b/pymode/libs/pylama/tasks.py index a7172b31..43914909 100644 --- a/pymode/libs/pylama/tasks.py +++ b/pymode/libs/pylama/tasks.py @@ -1,4 +1,5 @@ -""" Async code checking. """ +""" Support for asyncronious code checking. 
""" + import logging import threading from os import path as op @@ -91,9 +92,11 @@ def check_path(path, options=None, rootpath=None, code=None, **meta): errors = [] for error in run(path, code, options): try: - error['rel'] = op.relpath(error['filename'], rootpath) - error['col'] = error.get('col', 1) + error._info['rel'] = op.relpath(error.filename, rootpath) errors.append(error) except KeyError: continue + return errors + +# pylama:ignore=W0212 diff --git a/pymode/lint.py b/pymode/lint.py index 6c89852e..fdc01e23 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -12,7 +12,6 @@ def code_check(): :return bool: """ - with silence_stderr(): from pylama.main import parse_options @@ -57,6 +56,8 @@ def __sort(e): errors = sorted(errors, key=__sort) for e in errors: - e['bufnr'] = env.curbuf.number + e._info['bufnr'] = env.curbuf.number + + env.run('g:PymodeLocList.current().extend', [e._info for e in errors]) - env.run('g:PymodeLocList.current().extend', errors) +# pylama:ignore=W0212 diff --git a/pymode/rope.py b/pymode/rope.py index 1f36caa7..fd6cc047 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -45,7 +45,6 @@ def completions(): :return None: """ - row, col = env.cursor if env.var('a:findstart', True): count = 0 @@ -109,7 +108,6 @@ def get_proporsals(source, offset, base='', dot=False): :return str: """ - with RopeContext() as ctx: try: @@ -140,7 +138,6 @@ def get_proporsals(source, offset, base='', dot=False): @env.catch_exceptions def goto(): """ Goto definition. """ - with RopeContext() as ctx: source, offset = env.get_offset_params() @@ -152,14 +149,14 @@ def goto(): return env.goto_file( - found_resource.real_path, cmd=ctx.options.get('goto_definition_cmd')) + found_resource.real_path, + cmd=ctx.options.get('goto_definition_cmd')) env.goto_line(line) @env.catch_exceptions def show_doc(): """ Show documentation. 
""" - with RopeContext() as ctx: source, offset = env.get_offset_params() try: @@ -174,7 +171,6 @@ def show_doc(): def find_it(): """ Find occurrences. """ - with RopeContext() as ctx: _, offset = env.get_offset_params() try: @@ -195,7 +191,6 @@ def find_it(): def update_python_path(paths): """ Update sys.path and make sure the new items come first. """ - old_sys_path_items = list(sys.path) for path in paths: @@ -213,7 +208,6 @@ def update_python_path(paths): def organize_imports(): """ Organize imports in current file. """ - with RopeContext() as ctx: organizer = ImportOrganizer(ctx.project) changes = organizer.organize_imports(ctx.resource) @@ -254,7 +248,6 @@ def undo(): :return bool: """ - with RopeContext() as ctx: changes = ctx.project.history.tobe_undone if changes is None: @@ -273,7 +266,6 @@ def redo(): :return bool: """ - with RopeContext() as ctx: changes = ctx.project.history.tobe_redone if changes is None: @@ -342,7 +334,8 @@ def autoimport(): _insert_import(word, modules[0], ctx) else: - module = env.user_input_choices('Which module to import:', *modules) + module = env.user_input_choices( + 'Which module to import:', *modules) _insert_import(word, module, ctx) return True @@ -354,7 +347,7 @@ class RopeContext(object): """ A context manager to have a rope project context. """ def __init__(self, path, project_path): - + """ Init Rope context. """ self.path = path self.project = project.Project( @@ -384,6 +377,7 @@ def __init__(self, path, project_path): env.message('Init Rope project: %s' % project_path) def __enter__(self): + """ Enter to Rope ctx. """ env.let('g:pymode_rope_current', self.project.root.real_path) self.project.validate(self.project.root) self.resource = libutils.path_to_resource( @@ -398,12 +392,12 @@ def __enter__(self): return self def __exit__(self, t, value, traceback): + """ Exit from Rope ctx. """ if t is None: self.project.close() def generate_autoimport_cache(self): """ Update autoimport cache. 
""" - env.message('Regenerate autoimport cache.') modules = self.options.get('autoimport_modules', []) @@ -426,6 +420,7 @@ class ProgressHandler(object): """ Handle task progress. """ def __init__(self, msg): + """ Init progress handler. """ self.handle = TaskHandle(name="refactoring_handle") self.handle.add_observer(self) self.message = msg @@ -455,7 +450,6 @@ def run(self): :return bool: """ - with RopeContext() as ctx: if not ctx.resource: @@ -497,7 +491,6 @@ def run(self): @staticmethod def get_refactor(ctx): """ Get refactor object. """ - raise NotImplementedError @staticmethod @@ -507,7 +500,6 @@ def get_input_str(refactor, ctx): :return bool: True """ - return True @staticmethod @@ -764,7 +756,7 @@ def get_changes(self, refactor, input_string): olds = [arg[0] for arg in refactor.get_args()] changers = [] - for arg in [a for a in olds if not a in args]: + for arg in [a for a in olds if a not in args]: changers.append(change_signature.ArgumentRemover(olds.index(arg))) olds.remove(arg) @@ -945,3 +937,5 @@ def _insert_import(name, module, ctx): progress = ProgressHandler('Apply changes ...') ctx.project.do(changes, task_handle=progress.handle) reload_changes(changes) + +# pylama:ignore=W1401,E1120,D diff --git a/pymode/utils.py b/pymode/utils.py index 43003c17..4a780a14 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -23,7 +23,6 @@ @contextmanager def silence_stderr(): """ Redirect stderr. """ - if DEBUG: yield @@ -40,7 +39,6 @@ def silence_stderr(): def patch_paths(): """ Function description. 
""" - sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs')) if PY2: diff --git a/pymode/virtualenv.py b/pymode/virtualenv.py index 87734f54..08194b20 100644 --- a/pymode/virtualenv.py +++ b/pymode/virtualenv.py @@ -12,7 +12,6 @@ def enable_virtualenv(): :return bool: """ - path = env.var('g:pymode_virtualenv_path') enabled = env.var('g:pymode_virtualenv_enabled') if path == enabled: From 246addfb2cca45573297012b736d1ca73ce9ad17 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 7 Jun 2014 13:15:38 +0400 Subject: [PATCH 099/428] Fix code. --- Changelog.rst | 3 ++- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- pymode/rope.py | 5 ++++- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index d69ea7f5..03e6fee8 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,12 +1,13 @@ Changelog ========= + * Pylama updated to version 3.3.2 * Get fold's expression symbol from &fillchars; * Fixed error when setting g:pymode_breakpoint_cmd (expobrain); * Fixed code running; * Ability to override rope project root and .ropeproject folder * Added path argument to `PymodeRopeNewProject` which skips prompt - + * Disable `pymode_rope_lookup_project` by default * Options added: 'pymode_rope_project_root', 'pymode_rope_ropefolder' diff --git a/doc/pymode.txt b/doc/pymode.txt index 94e28782..214b26ae 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -381,7 +381,7 @@ possibly unrelated, files) Enable searching for |.ropeproject| in parent directories *'g:pymode_rope_lookup_project'* > - let g:pymode_rope_lookup_project = 1 + let g:pymode_rope_lookup_project = 0 You can also manually set the rope project directory. If not specified rope will use the current directory. 
diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 077cb351..1e3b9be2 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -158,7 +158,7 @@ call pymode#default('g:pymode_rope_project_root', '') call pymode#default('g:pymode_rope_ropefolder', '.ropeproject') " If project hasnt been finded in current working directory, look at parents directory -call pymode#default('g:pymode_rope_lookup_project', 1) +call pymode#default('g:pymode_rope_lookup_project', 0) " Enable Rope completion call pymode#default('g:pymode_rope_completion', 1) diff --git a/pymode/rope.py b/pymode/rope.py index fd6cc047..79654cdd 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -293,7 +293,7 @@ def get_ctx(*args, **kwargs): return resources.get(path) project_path = env.var('g:pymode_rope_project_root') - if project_path: + if not project_path: project_path = env.curdir env.debug('Look ctx', project_path) if env.var('g:pymode_rope_lookup_project', True): @@ -302,10 +302,13 @@ def get_ctx(*args, **kwargs): if not os.path.exists(project_path): env.error("Rope project root not exist: %s" % project_path) ctx = None + else: ctx = projects.get(project_path) + if not ctx: projects[project_path] = ctx = cls(path, project_path) + resources[path] = ctx return ctx return get_ctx From a9e2891b4ba23948bf052be8a43bb9c30e82f9ae Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 7 Jun 2014 13:20:09 +0400 Subject: [PATCH 100/428] Update authors --- AUTHORS | 61 ++++++++++++++++++++++++++++++--------------------------- 1 file changed, 32 insertions(+), 29 deletions(-) diff --git a/AUTHORS b/AUTHORS index a287aabe..e1abde9b 100644 --- a/AUTHORS +++ b/AUTHORS @@ -5,36 +5,39 @@ Maintainer: Contributors: -* Alvin Francis (alvinfrancis); -* Anler Hp (ikame); -* Anton Parkhomenko (chuwy); -* Ashley Hewson (ashleyh); -* Benjamin Ruston (bruston); -* Boris Filippov (frenzykryger); -* Brad Mease (bmease) -* Daniel Hahler (blueyed) -* David Vogt (winged); -* Denis Kasak (dkasak); -* Dirk Wallenstein 
(dirkwallenstein); -* Florent Xicluna (florentx); -* Fredrik Henrysson (fhenrysson); -* Igor Guerrero (igorgue); -* Jay Rainey (jawrainey); -* Jonathan McCall (Jonnymcc); -* Kevin Deldycke (kdeldycke); -* Lowe Thiderman (thiderman); -* Martin Brochhaus (mbrochh); -* Matthew Moses (mlmoses); -* Mel Boyce (syngin) -* Mohammed (mbadran); -* Naoya Inada (naoina); -* Pedro Algarvio (s0undt3ch); -* Phillip Cloud (cpcloud); -* Piet Delport (pjdelport); -* Robert David Grant (bgrant); -* Ronald Andreu Kaiser (cathoderay); +* Alvin Francis (http://github.com/alvinfrancis); +* Anler Hp (http://github.com/ikame); +* Anton Parkhomenko (http://github.com/chuwy); +* Ashley Hewson (http://github.com/ashleyh); +* Benjamin Ruston (http://github.com/bruston); +* Boris Filippov (http://github.com/frenzykryger); +* Brad Mease (http://github.com/bmease) +* Daniel Hahler (http://github.com/blueyed) +* David Vogt (http://github.com/winged); +* Denis Kasak (http://github.com/dkasak); +* Dirk Wallenstein (http://github.com/dirkwallenstein); +* Florent Xicluna (http://github.com/florentx); +* Fredrik Henrysson (http://github.com/fhenrysson); +* Igor Guerrero (http://github.com/igorgue); +* Jason Harvey (http://github.com/alienth) +* Jay Rainey (https://github.com/jawrainey) +* Jonathan McCall (http://github.com/Jonnymcc); +* Kevin Deldycke (http://github.com/kdeldycke); +* Lowe Thiderman (http://github.com/thiderman); +* Martin Brochhaus (http://github.com/mbrochh); +* Matthew Moses (http://github.com/mlmoses); +* Mel Boyce (http://github.com/syngin) +* Mohammed (http://github.com/mbadran); +* Naoya Inada (http://github.com/naoina); +* Pedro Algarvio (http://github.com/s0undt3ch); +* Phillip Cloud (http://github.com/cpcloud); +* Piet Delport (http://github.com/pjdelport); +* Robert David Grant (http://github.com/bgrant); +* Ronald Andreu Kaiser (http://github.com/cathoderay); * Sorin Ionescu (sorin-ionescu); -* Steve Losh (sjl); +* Steve Losh (http://github.com/sjl); +* bendavis78 
(http://github.com/bendavis78) +* fwuzju (http://github.com/fwuzju) * lawrenceakka; * lee (loyalpartner); * nixon; From 99bc09243a5210b45fe8dbf6f5c37ddc299930f5 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 7 Jun 2014 13:49:08 +0400 Subject: [PATCH 101/428] Update pylama --- pymode/libs/pylama/config.py | 4 ++++ pymode/libs/pylama/core.py | 2 +- pymode/libs/pylama/main.py | 2 +- pymode/libs/pylama/tasks.py | 2 +- pymode/lint.py | 1 + 5 files changed, 8 insertions(+), 3 deletions(-) diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index 7f09e7ec..b21959d3 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -113,6 +113,10 @@ def parse_linters(linters): "--options", "-o", default=_Default(DEFAULT_INI_PATH), help="Select configuration file. By default is '/pylama.ini'") +PARSER.add_argument( + "--force", "-F", action='store_true', default=_Default(False), + help="Force code checking (if linter doesnt allow)") + ACTIONS = dict((a.dest, a) for a in PARSER._actions) diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 8c2841b6..6d19d4a7 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -56,7 +56,7 @@ def run(path='', code=None, options=None): lname, linter = item - if not linter or path and not linter.allow(path): + if not linter: continue LOGGER.info("Run %s", lname) diff --git a/pymode/libs/pylama/main.py b/pymode/libs/pylama/main.py index 6a0a3268..9ce91c37 100644 --- a/pymode/libs/pylama/main.py +++ b/pymode/libs/pylama/main.py @@ -59,7 +59,7 @@ def check_files(paths, options, rootpath=None, error=True): work_paths = [] for path in paths: - if not any(l.allow(path) for _, l in options.linters): + if not options.force and not any(l.allow(path) for _, l in options.linters): # noqa continue if not op.exists(path): diff --git a/pymode/libs/pylama/tasks.py b/pymode/libs/pylama/tasks.py index 43914909..69881e1e 100644 --- a/pymode/libs/pylama/tasks.py +++ 
b/pymode/libs/pylama/tasks.py @@ -80,7 +80,7 @@ def async_check_files(paths, options, rootpath=None): return errors -def check_path(path, options=None, rootpath=None, code=None, **meta): +def check_path(path, options=None, rootpath=None, code=None): """ Check path. :return list: list of errors diff --git a/pymode/lint.py b/pymode/lint.py index fdc01e23..ab210a90 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -24,6 +24,7 @@ def code_check(): ignore=env.var('g:pymode_lint_ignore'), select=env.var('g:pymode_lint_select'), linters=env.var('g:pymode_lint_checkers'), + force=1, ) env.debug(options) From 557057e10ac9477fc46142822c467f2b528b0f1d Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 7 Jun 2014 13:50:13 +0400 Subject: [PATCH 102/428] Update version --- Changelog.rst | 2 ++ doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 03e6fee8..705cbd02 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,6 +1,8 @@ Changelog ========= +## 2013-12-04 0.8.0 +------------------- * Pylama updated to version 3.3.2 * Get fold's expression symbol from &fillchars; * Fixed error when setting g:pymode_breakpoint_cmd (expobrain); diff --git a/doc/pymode.txt b/doc/pymode.txt index 214b26ae..d623fe5e 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.7.8b + Version: 0.8.0 ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 1e3b9be2..e4cd5562 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.7.8b" +let g:pymode_version = "0.8.0" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! 
PymodeTroubleshooting call pymode#troubleshooting#test() From 304a68e0e92bca931e1a2098168a8991e95a9401 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 9 Jun 2014 00:05:10 +0400 Subject: [PATCH 103/428] Update pylama to version 4.0.1 Supports configuration from setup.cfg, tox.ini, pytest.ini files --- pymode/libs/pylama/__init__.py | 2 +- pymode/libs/pylama/config.py | 56 +++++++++++++++-------- pymode/libs/pylama/libs/inirama.py | 73 ++++++++++++++---------------- 3 files changed, 71 insertions(+), 60 deletions(-) diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index c4a116f4..23f7fd30 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -__version__ = "3.3.2" +__version__ = "4.0.1" __project__ = "pylama" __author__ = "Kirill Klenov " __license__ = "GNU LGPL" diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index b21959d3..d6f7c72f 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -1,7 +1,7 @@ """ Parse arguments from command line and configuration files. 
""" import fnmatch import sys -from os import getcwd, path +import os from re import compile as re import logging @@ -21,8 +21,11 @@ #: A default checkers DEFAULT_LINTERS = 'pep8', 'pyflakes', 'mccabe' -CURDIR = getcwd() -DEFAULT_INI_PATH = path.join(CURDIR, 'pylama.ini') +CURDIR = os.getcwd() +CONFIG_FILES = [ + os.path.join(CURDIR, basename) for basename in + ('pylama.ini', 'setup.cfg', 'tox.ini', 'pytest.ini') +] class _Default(object): @@ -67,7 +70,7 @@ def parse_linters(linters): PARSER = ArgumentParser(description="Code audit tool for python.") PARSER.add_argument( "path", nargs='?', default=_Default(CURDIR), - help="Path on file or directory.") + help="Path on file or directory for code check.") PARSER.add_argument( "--verbose", "-v", action='store_true', help="Verbose mode.") @@ -77,11 +80,11 @@ def parse_linters(linters): PARSER.add_argument( "--format", "-f", default=_Default('pep8'), choices=['pep8', 'pylint'], - help="Error format.") + help="Choose errors format (pep8, pylint).") PARSER.add_argument( "--select", "-s", default=_Default(''), type=split_csp_str, - help="Select errors and warnings. (comma-separated)") + help="Select errors and warnings. (comma-separated list)") PARSER.add_argument( @@ -100,7 +103,7 @@ def parse_linters(linters): type=lambda s: [re(fnmatch.translate(p)) for p in s.split(',') if p], help="Skip files by masks (comma-separated, Ex. */messages.py)") -PARSER.add_argument("--report", "-r", help="Filename for report.") +PARSER.add_argument("--report", "-r", help="Send report to file [REPORT]") PARSER.add_argument( "--hook", action="store_true", help="Install Git (Mercurial) hook.") @@ -110,7 +113,7 @@ def parse_linters(linters): "Dont supported with pylint.") PARSER.add_argument( - "--options", "-o", default=_Default(DEFAULT_INI_PATH), + "--options", "-o", default="", help="Select configuration file. 
By default is '/pylama.ini'") PARSER.add_argument( @@ -151,17 +154,22 @@ def parse_options(args=None, config=True, **overrides): # noqa setattr(options, k, _Default(v)) # Parse file related options - for k, s in cfg.sections.items(): - if k == cfg.default_section: + for name, opts in cfg.sections.items(): + + if not name.startswith('pylama'): + continue + + if name == cfg.default_section: continue - if k in LINTERS: - options.linter_params[k] = dict(s) + + name = name[7:] + + if name in LINTERS: + options.linter_params[name] = dict(opts) continue - mask = re(fnmatch.translate(k)) - options.file_params[mask] = dict(s) - options.file_params[mask]['lint'] = int( - options.file_params[mask].get('lint', 1) - ) + + mask = re(fnmatch.translate(name)) + options.file_params[mask] = dict(opts) # Postprocess options opts = dict(options.__dict__.items()) @@ -187,15 +195,21 @@ def process_value(name, value): return value -def get_config(ini_path=DEFAULT_INI_PATH): +def get_config(ini_path=None): """ Load configuration from INI. 
:return Namespace: """ config = Namespace() - config.default_section = 'main' - config.read(ini_path) + config.default_section = 'pylama' + + if not ini_path: + for path in CONFIG_FILES: + if os.path.isfile(path) and os.access(path, os.R_OK): + config.read(path) + else: + config.read(ini_path) return config @@ -207,3 +221,5 @@ def setup_logger(options): LOGGER.removeHandler(STREAM) LOGGER.addHandler(logging.FileHandler(options.report, mode='w')) LOGGER.info('Try to read configuration from: ' + options.options) + +# pylama:ignore=W0212 diff --git a/pymode/libs/pylama/libs/inirama.py b/pymode/libs/pylama/libs/inirama.py index 095e8dc0..5c1dfd46 100644 --- a/pymode/libs/pylama/libs/inirama.py +++ b/pymode/libs/pylama/libs/inirama.py @@ -19,7 +19,6 @@ import io import re import logging -from collections import MutableMapping try: from collections import OrderedDict except ImportError: @@ -67,7 +66,7 @@ def keys(self): iteritems = DictMixin.iteritems -__version__ = "0.5.1" +__version__ = "0.7.0" __project__ = "Inirama" __author__ = "Kirill Klenov " __license__ = "BSD" @@ -170,7 +169,9 @@ class INIScanner(Scanner): ('SECTION', re.compile(r'\[[^]]+\]')), ('IGNORE', re.compile(r'[ \r\t\n]+')), ('COMMENT', re.compile(r'[;#].*')), - ('KEY', re.compile(r'[\w_]+\s*[:=].*'))] + ('KEY', re.compile(r'[\w_]+\s*[:=].*')), + ('CONTINUATION', re.compile(r'.*')) + ] ignore = ['IGNORE'] @@ -183,43 +184,20 @@ def pre_scan(self): undefined = object() -class Section(MutableMapping): +class Section(OrderedDict): """ Representation of INI section. 
""" def __init__(self, namespace, *args, **kwargs): super(Section, self).__init__(*args, **kwargs) self.namespace = namespace - self.__storage__ = dict() def __setitem__(self, name, value): value = str(value) if value.isdigit(): value = int(value) - self.__storage__[name] = value - - def __getitem__(self, name): - return self.__storage__[name] - - def __delitem__(self, name): - del self.__storage__[name] - - def __len__(self): - return len(self.__storage__) - - def __iter__(self): - return iter(self.__storage__) - - def __repr__(self): - return "<{0} {1}>".format(self.__class__.__name__, str(dict(self))) - - def iteritems(self): - """ Impletment iteritems. """ - for key in self.__storage__.keys(): - yield key, self[key] - - items = lambda s: list(s.iteritems()) + super(Section, self).__setitem__(name, value) class InterpolationSection(Section): @@ -246,19 +224,28 @@ def __interpolate__(self, math): except KeyError: return '' - def __getitem__(self, name): + def __getitem__(self, name, raw=False): value = super(InterpolationSection, self).__getitem__(name) - sample = undefined - while sample != value: - try: - sample, value = value, self.var_re.sub( - self.__interpolate__, value) - except RuntimeError: - message = "Interpolation failed: {0}".format(name) - NS_LOGGER.error(message) - raise ValueError(message) + if not raw: + sample = undefined + while sample != value: + try: + sample, value = value, self.var_re.sub( + self.__interpolate__, value) + except RuntimeError: + message = "Interpolation failed: {0}".format(name) + NS_LOGGER.error(message) + raise ValueError(message) return value + def iteritems(self, raw=False): + """ Iterate self items. 
""" + + for key in self: + yield key, self.__getitem__(key, raw=raw) + + items = iteritems + class Namespace(object): @@ -356,6 +343,7 @@ def parse(self, source, update=True, **params): scanner.scan() section = self.default_section + name = None for token in scanner.tokens: if token[0] == 'KEY': @@ -368,6 +356,13 @@ def parse(self, source, update=True, **params): elif token[0] == 'SECTION': section = token[1].strip('[]') + elif token[0] == 'CONTINUATION': + if not name: + raise SyntaxError( + "SyntaxError[@char {0}: {1}]".format( + token[2], "Bad continuation.")) + self[section][name] += '\n' + token[1].strip() + def __getitem__(self, name): """ Look name in self sections. @@ -406,4 +401,4 @@ class InterpolationNamespace(Namespace): section_type = InterpolationSection -# lint_ignore=W0201,R0924,F0401 +# pylama:ignore=D,W02,E731,W0621 From ca0078befc6ce92cfca9149e026e3dd0d9c5b8a7 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 11 Jun 2014 17:50:37 +0400 Subject: [PATCH 104/428] Update pylama. 
Fix #438 --- pymode/libs/pylama/__init__.py | 2 +- pymode/libs/pylama/errors.py | 7 +++ pymode/libs/pylama/pytest.py | 86 ++++++++++++++++++++++++++++++++++ 3 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 pymode/libs/pylama/pytest.py diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index 23f7fd30..c3eb03a8 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -__version__ = "4.0.1" +__version__ = "5.0.1" __project__ = "pylama" __author__ = "Kirill Klenov " __license__ = "GNU LGPL" diff --git a/pymode/libs/pylama/errors.py b/pymode/libs/pylama/errors.py index 5e8aa026..58fcbef0 100644 --- a/pymode/libs/pylama/errors.py +++ b/pymode/libs/pylama/errors.py @@ -32,6 +32,9 @@ # multiple statements on one line [('pylint', 'C0321'), ('pep8', 'E702')], + # bad indentation + [('pylint', 'W0311'), ('pep8', 'E111')], + ) DUPLICATES = dict((key, values) for values in DUPLICATES for key in values) @@ -57,6 +60,10 @@ def __getattr__(self, name): def __getitem__(self, name): return self._info[name] + def get(self, name, default=None): + """ Implement dictionary `get` method. """ + return self._info.get(name, default) + def __repr__(self): return "" % (self.number, self.linter) diff --git a/pymode/libs/pylama/pytest.py b/pymode/libs/pylama/pytest.py new file mode 100644 index 00000000..cbfe787d --- /dev/null +++ b/pymode/libs/pylama/pytest.py @@ -0,0 +1,86 @@ +""" py.test plugin for checking files with pylama. 
""" +from __future__ import absolute_import + +from os import path as op + +import py +import pytest + + +HISTKEY = "pylama/mtimes" + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption( + '--pylama', action='store_true', + help="perform some pylama code checks on .py files") + + +def pytest_sessionstart(session): + config = session.config + if config.option.pylama and getattr(config, 'cache', None): + config._pylamamtimes = config.cache.get(HISTKEY, {}) + + +def pytest_sessionfinish(session): + config = session.config + if hasattr(config, "_pylamamtimes"): + config.cache.set(HISTKEY, config._pylamamtimes) + + +def pytest_collect_file(path, parent): + config = parent.config + if config.option.pylama and path.ext == '.py': + return PylamaItem(path, parent) + + +class PylamaError(Exception): + """ indicates an error during pylama checks. """ + + +class PylamaItem(pytest.Item, pytest.File): + + def __init__(self, path, parent): + super(PylamaItem, self).__init__(path, parent) + self.add_marker("pep8") + self.cache = None + self._pylamamtimes = None + + def setup(self): + if not getattr(self.config, 'cache', None): + return False + + self.cache = True + self._pylamamtimes = self.fspath.mtime() + pylamamtimes = self.config._pylamamtimes + old = pylamamtimes.get(str(self.fspath), 0) + if old == self._pylamamtimes: + pytest.skip("file(s) previously passed Pylama checks") + + def runtest(self): + call = py.io.StdCapture.call + errors, out, err = call(check_file, self.fspath) + # errors = check_file(self.fspath) + if errors: + raise PylamaError(out, err) + # update mtime only if test passed + # otherwise failures would not be re-run next time + if self.cache: + self.config._pylamamtimes[str(self.fspath)] = self._pylamamtimes + + def repr_failure(self, excinfo): + if excinfo.errisinstance(PylamaError): + return excinfo.value.args[0] + return super(PylamaItem, self).repr_failure(excinfo) + + +def check_file(path): + from pylama.main import 
parse_options, check_files + from pylama.config import CURDIR + + options = parse_options() + path = op.relpath(str(path), CURDIR) + return check_files([path], options, error=False) + +# pylama:ignore=D,E1002,W0212 From 0c54d086ded1a26a675aa20c5db8cbeef767c788 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 11 Jun 2014 18:04:04 +0400 Subject: [PATCH 105/428] Update python3 support. Fix #437 --- pymode/__init__.py | 5 +-- pymode/_compat.py | 98 +++++++++++++++++++++++++++++++++++++++++++ pymode/async.py | 5 +-- pymode/environment.py | 2 +- pymode/rope.py | 3 +- pymode/run.py | 9 +--- pymode/utils.py | 8 +--- 7 files changed, 105 insertions(+), 25 deletions(-) create mode 100644 pymode/_compat.py diff --git a/pymode/__init__.py b/pymode/__init__.py index 602bd95f..37691abb 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -29,10 +29,7 @@ class Options(object): def get_documentation(): """ Search documentation and append to current buffer. """ - try: - from StringIO import StringIO - except ImportError: - from io import StringIO + from ._compat import StringIO sys.stdout, _ = StringIO(), sys.stdout help(vim.eval('a:word')) diff --git a/pymode/_compat.py b/pymode/_compat.py new file mode 100644 index 00000000..d859f152 --- /dev/null +++ b/pymode/_compat.py @@ -0,0 +1,98 @@ +""" Compatibility. + + Some py2/py3 compatibility support based on a stripped down + version of six so we don't have to depend on a specific version + of it. + + :copyright: (c) 2014 by Armin Ronacher. 
+ :license: BSD +""" +import sys + +PY2 = sys.version_info[0] == 2 +_identity = lambda x: x + + +if not PY2: + text_type = str + string_types = (str,) + integer_types = (int, ) + + iterkeys = lambda d: iter(d.keys()) + itervalues = lambda d: iter(d.values()) + iteritems = lambda d: iter(d.items()) + + from io import StringIO + from queue import Queue # noqa + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + implements_to_string = _identity + +else: + text_type = unicode + string_types = (str, unicode) + integer_types = (int, long) + + iterkeys = lambda d: d.iterkeys() + itervalues = lambda d: d.itervalues() + iteritems = lambda d: d.iteritems() + + from cStringIO import StringIO + from Queue import Queue + + exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') + + def implements_to_string(cls): + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda x: x.__unicode__().encode('utf-8') + return cls + + +def with_metaclass(meta, *bases): + # This requires a bit of explanation: the basic idea is to make a + # dummy metaclass for one level of class instantiation that replaces + # itself with the actual metaclass. Because of internal type checks + # we also need to make sure that we downgrade the custom metaclass + # for one level to something closer to type (that's why __call__ and + # __init__ comes back from type etc.). + # + # This has the advantage over six.with_metaclass in that it does not + # introduce dummy classes into the final MRO. + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +# Certain versions of pypy have a bug where clearing the exception stack +# breaks the __exit__ function in a very peculiar way. This is currently +# true for pypy 2.2.1 for instance. 
The second level of exception blocks +# is necessary because pypy seems to forget to check if an exception +# happend until the next bytecode instruction? +BROKEN_PYPY_CTXMGR_EXIT = False +if hasattr(sys, 'pypy_version_info'): + class _Mgr(object): + def __enter__(self): + return self + def __exit__(self, *args): + sys.exc_clear() + try: + try: + with _Mgr(): + raise AssertionError() + except: + raise + except TypeError: + BROKEN_PYPY_CTXMGR_EXIT = True + except AssertionError: + pass + +# pylama:skip=1 diff --git a/pymode/async.py b/pymode/async.py index 75f1ebdf..dd314d76 100644 --- a/pymode/async.py +++ b/pymode/async.py @@ -1,9 +1,6 @@ """ Python-mode async support. """ -try: - from Queue import Queue -except ImportError: - from queue import Queue # noqa +from ._compat import Queue RESULTS = Queue() diff --git a/pymode/environment.py b/pymode/environment.py index 69d97ffb..39b708e9 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -7,7 +7,7 @@ import time import os.path -from .utils import PY2 +from ._compat import PY2 class VimPymodeEnviroment(object): diff --git a/pymode/rope.py b/pymode/rope.py index 79654cdd..9888db71 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -6,7 +6,6 @@ import re import site import sys -import StringIO from rope.base import project, libutils, exceptions, change, worder # noqa from rope.base.fscommands import FileSystemCommands # noqa @@ -14,7 +13,7 @@ from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa from rope.refactor import ModuleToPackage, ImportOrganizer, rename, extract, inline, usefunction, move, change_signature, importutils # noqa - +from ._compat import StringIO from .environment import env diff --git a/pymode/run.py b/pymode/run.py index 3a8f24c2..b5a2bfa1 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -1,14 +1,9 @@ """ Code runnning support. 
""" - -try: - from StringIO import StringIO -except ImportError: - from io import StringIO - import sys +from re import compile as re +from ._compat import StringIO from .environment import env -from re import compile as re encoding = re(r'#[^\w]+coding:\s+utf.*$') diff --git a/pymode/utils.py b/pymode/utils.py index 4a780a14..79061857 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -6,16 +6,10 @@ from contextlib import contextmanager import vim # noqa - - -try: - from StringIO import StringIO -except ImportError: - from io import StringIO +from ._compat import StringIO, PY2 DEBUG = int(vim.eval('g:pymode_debug')) -PY2 = sys.version_info[0] == 2 warnings.filterwarnings('ignore') From 6cea61d4b12fdf29da8a5b481d2ac2a77cc960da Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 11 Jun 2014 18:19:41 +0400 Subject: [PATCH 106/428] Improve troubleshooting function --- autoload/pymode/troubleshooting.vim | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/autoload/pymode/troubleshooting.vim b/autoload/pymode/troubleshooting.vim index 31eb02a0..915a5c5e 100644 --- a/autoload/pymode/troubleshooting.vim +++ b/autoload/pymode/troubleshooting.vim @@ -18,22 +18,26 @@ fun! pymode#troubleshooting#test() "{{{ call append('0', ['Pymode diagnostic', \ '===================', - \ 'VIM:' . v:version . ', OS: ' . os .', multi_byte:' . has('multi_byte') . ', pymode: ' . g:pymode_version . ', python: ' . g:pymode_python, + \ 'VIM:' . v:version . ', OS: ' . os .', multi_byte:' . has('multi_byte') . ', pymode: ' . g:pymode_version . ', pymode-python: ' . g:pymode_python, \ '']) if !exists('#filetypeplugin') call append('$', ['WARNING: ', 'Python-mode required :filetype plugin indent on', '']) endif + call append('$', ['+python: ' . has('python')]) + call append('$', ['+python3: ' . 
has('python3'), '']) + if g:pymode_python == 'disable' + if !has('python') && !has('python3') - call append('$', ['WARNING: ', 'Python-mode required vim compiled with +python or +python3.', + call append('$', ['WARNING: Python-mode required vim compiled with +python or +python3.', \ '"lint, rope, run, doc, virtualenv" features disabled.', '']) else - call append('$', ['WARNING: ', 'Python is disabled by `pymode_python` option.', + call append('$', ['WARNING: Python is disabled by `pymode_python` option.', \ '"lint, rope, run, doc, virtualenv" features disabled.', '']) endif From 66b70dd22ecdbe8c957fb4c1716f376b0511f025 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 11 Jun 2014 18:20:47 +0400 Subject: [PATCH 107/428] Update version --- Changelog.rst | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 705cbd02..909010fc 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -## 2013-12-04 0.8.0 +## 2014-06-11 0.8.1 ------------------- * Pylama updated to version 3.3.2 * Get fold's expression symbol from &fillchars; diff --git a/doc/pymode.txt b/doc/pymode.txt index d623fe5e..ae2a5d27 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.8.0 + Version: 0.8.1 ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index e4cd5562..12156402 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.8.0" +let g:pymode_version = "0.8.1" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! 
PymodeTroubleshooting call pymode#troubleshooting#test() From e5f1486f9b5522ab0fca39f17e0020801a049aab Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sun, 15 Jun 2014 20:25:29 +0400 Subject: [PATCH 108/428] Update pylama --- Changelog.rst | 20 +++--- pymode/libs/pylama/__init__.py | 2 +- pymode/libs/pylama/config.py | 4 +- pymode/libs/pylama/core.py | 16 +++-- pymode/libs/pylama/errors.py | 2 +- pymode/libs/pylama/libs/inirama.py | 17 ++--- pymode/libs/pylama/lint/extensions.py | 3 +- .../pylama/lint/pylama_mccabe/__init__.py | 3 +- .../libs/pylama/lint/pylama_pep8/__init__.py | 4 +- .../pylama/lint/pylama_pyflakes/__init__.py | 10 +-- .../pylama/lint/pylama_pylint/__init__.py | 2 +- .../lint/pylama_pylint/astroid/rebuilder.py | 8 +-- .../pylama_pylint/astroid/scoped_nodes.py | 34 ++++++++- pymode/libs/pylama/lint/pylama_pylint/main.py | 72 ++++++++++++++++--- .../pylint/checkers/exceptions.py | 2 + .../pylint/checkers/typecheck.py | 2 +- 16 files changed, 150 insertions(+), 51 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 909010fc..6ae3e35d 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,17 +1,19 @@ Changelog ========= +* Pylama updated to version 5.0.5 + ## 2014-06-11 0.8.1 ------------------- - * Pylama updated to version 3.3.2 - * Get fold's expression symbol from &fillchars; - * Fixed error when setting g:pymode_breakpoint_cmd (expobrain); - * Fixed code running; - * Ability to override rope project root and .ropeproject folder - * Added path argument to `PymodeRopeNewProject` which skips prompt - * Disable `pymode_rope_lookup_project` by default - * Options added: - 'pymode_rope_project_root', 'pymode_rope_ropefolder' +* Pylama updated to version 3.3.2 +* Get fold's expression symbol from &fillchars; +* Fixed error when setting g:pymode_breakpoint_cmd (expobrain); +* Fixed code running; +* Ability to override rope project root and .ropeproject folder +* Added path argument to `PymodeRopeNewProject` which skips prompt +* Disable 
`pymode_rope_lookup_project` by default +* Options added: + 'pymode_rope_project_root', 'pymode_rope_ropefolder' ## 2013-12-04 0.7.8b diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py index c3eb03a8..1576bfd2 100644 --- a/pymode/libs/pylama/__init__.py +++ b/pymode/libs/pylama/__init__.py @@ -5,7 +5,7 @@ """ -__version__ = "5.0.1" +__version__ = "5.0.5" __project__ = "pylama" __author__ = "Kirill Klenov " __license__ = "GNU LGPL" diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py index d6f7c72f..881e930a 100644 --- a/pymode/libs/pylama/config.py +++ b/pymode/libs/pylama/config.py @@ -136,7 +136,7 @@ def parse_options(args=None, config=True, **overrides): # noqa # Parse args from command string options = PARSER.parse_args(args) options.file_params = dict() - options.linter_params = dict() + options.linters_params = dict() # Override options for k, v in overrides.items(): @@ -165,7 +165,7 @@ def parse_options(args=None, config=True, **overrides): # noqa name = name[7:] if name in LINTERS: - options.linter_params[name] = dict(opts) + options.linters_params[name] = dict(opts) continue mask = re(fnmatch.translate(name)) diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py index 6d19d4a7..1283a662 100644 --- a/pymode/libs/pylama/core.py +++ b/pymode/libs/pylama/core.py @@ -32,11 +32,11 @@ def run(path='', code=None, options=None): fileconfig = dict() params = dict() linters = LINTERS - linter_params = dict() + linters_params = dict() if options: linters = options.linters - linter_params = options.linter_params + linters_params = options.linters_params for mask in options.file_params: if mask.match(path): fileconfig.update(options.file_params[mask]) @@ -45,6 +45,7 @@ def run(path='', code=None, options=None): with CodeContext(code, path) as ctx: code = ctx.code params = prepare_params(parse_modeline(code), fileconfig, options) + LOGGER.debug('Checking params: %s', params) if params.get('skip'): return errors 
@@ -59,10 +60,13 @@ def run(path='', code=None, options=None): if not linter: continue - LOGGER.info("Run %s", lname) - meta = linter_params.get(lname, dict()) - errors += [Error(filename=path, linter=lname, **e) - for e in linter.run(path, code=code, **meta)] + lparams = linters_params.get(lname, dict()) + LOGGER.info("Run %s %s", lname, lparams) + + for er in linter.run( + path, code=code, ignore=params.get("ignore", set()), + select=params.get("select", set()), params=lparams): + errors.append(Error(filename=path, linter=lname, **er)) except IOError as e: LOGGER.debug("IOError %s", e) diff --git a/pymode/libs/pylama/errors.py b/pymode/libs/pylama/errors.py index 58fcbef0..9e80d2a6 100644 --- a/pymode/libs/pylama/errors.py +++ b/pymode/libs/pylama/errors.py @@ -67,4 +67,4 @@ def get(self, name, default=None): def __repr__(self): return "" % (self.number, self.linter) -# pylama:ignore=W0622,D +# pylama:ignore=W0622,D,R0924 diff --git a/pymode/libs/pylama/libs/inirama.py b/pymode/libs/pylama/libs/inirama.py index 5c1dfd46..2437fd3c 100644 --- a/pymode/libs/pylama/libs/inirama.py +++ b/pymode/libs/pylama/libs/inirama.py @@ -16,6 +16,13 @@ """ from __future__ import unicode_literals, print_function + +__version__ = "0.8.0" +__project__ = "Inirama" +__author__ = "Kirill Klenov " +__license__ = "BSD" + + import io import re import logging @@ -66,12 +73,6 @@ def keys(self): iteritems = DictMixin.iteritems -__version__ = "0.7.0" -__project__ = "Inirama" -__author__ = "Kirill Klenov " -__license__ = "BSD" - - NS_LOGGER = logging.getLogger('inirama') @@ -169,7 +170,7 @@ class INIScanner(Scanner): ('SECTION', re.compile(r'\[[^]]+\]')), ('IGNORE', re.compile(r'[ \r\t\n]+')), ('COMMENT', re.compile(r'[;#].*')), - ('KEY', re.compile(r'[\w_]+\s*[:=].*')), + ('KEY_VALUE', re.compile(r'[^=\s]+\s*[:=].*')), ('CONTINUATION', re.compile(r'.*')) ] @@ -346,7 +347,7 @@ def parse(self, source, update=True, **params): name = None for token in scanner.tokens: - if token[0] == 'KEY': + if 
token[0] == 'KEY_VALUE': name, value = re.split('[=:]', token[1], 1) name, value = name.strip(), value.strip() if not update and name in self[section]: diff --git a/pymode/libs/pylama/lint/extensions.py b/pymode/libs/pylama/lint/extensions.py index cdf344be..6e0bc3d2 100644 --- a/pymode/libs/pylama/lint/extensions.py +++ b/pymode/libs/pylama/lint/extensions.py @@ -25,6 +25,7 @@ from pkg_resources import iter_entry_points for entry in iter_entry_points('pylama.linter'): - LINTERS[entry.name] = entry.load()() + if entry.name not in LINTERS: + LINTERS[entry.name] = entry.load()() except ImportError: pass diff --git a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py index e371bdb4..65f3b0b3 100644 --- a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py +++ b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py @@ -8,7 +8,7 @@ class Linter(BaseLinter): """ Mccabe code complexity. """ @staticmethod - def run(path, code=None, complexity=10, **meta): + def run(path, code=None, params=None, **meta): """ MCCabe code checking. :return list: List of errors. @@ -16,4 +16,5 @@ def run(path, code=None, complexity=10, **meta): """ from .mccabe import get_code_complexity + complexity = params.get('complexity', 10) return get_code_complexity(code, complexity, filename=path) or [] diff --git a/pymode/libs/pylama/lint/pylama_pep8/__init__.py b/pymode/libs/pylama/lint/pylama_pep8/__init__.py index 4e83a425..a0a4ecb7 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pep8/__init__.py @@ -13,13 +13,13 @@ class Linter(BaseLinter): """ PEP8 code check. """ @staticmethod - def run(path, code=None, **options): + def run(path, code=None, params=None, **meta): """ PEP8 code checking. :return list: List of errors. 
""" - P8Style = StyleGuide(reporter=_PEP8Report, **options) + P8Style = StyleGuide(reporter=_PEP8Report, **params) buf = StringIO(code) return P8Style.input_file(path, lines=buf.readlines()) diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py index 4ecf0829..72fc26fe 100644 --- a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py @@ -21,20 +21,22 @@ def __init__(self): monkey_patch_messages(checker.messages) @staticmethod - def run(path, code=None, builtins="", **meta): + def run(path, code=None, params=None, **meta): """ Pyflake code checking. :return list: List of errors. """ import _ast - import os - os.environ.setdefault('PYFLAKES_BUILTINS', builtins) + builtins = params.get("builtins", "") + + if builtins: + builtins = builtins.split(",") errors = [] tree = compile(code, path, "exec", _ast.PyCF_ONLY_AST) - w = checker.Checker(tree, path) + w = checker.Checker(tree, path, builtins=builtins) w.messages = sorted(w.messages, key=lambda m: m.lineno) for w in w.messages: errors.append(dict( diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py index 6c12b39e..6ec4f3ba 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py @@ -4,7 +4,7 @@ # ================== -__version__ = "0.2.1" +__version__ = "0.3.1" __project__ = "pylama_pylint" __author__ = "horneds " __license__ = "BSD" diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py index ef8e7635..40a614f8 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py @@ -516,16 +516,16 @@ def visit_function(self, node, parent): frame = newnode.parent.frame() if isinstance(frame, new.Class): if newnode.name == '__new__': - 
newnode.type = 'classmethod' + newnode._type = 'classmethod' else: - newnode.type = 'method' + newnode._type = 'method' if newnode.decorators is not None: for decorator_expr in newnode.decorators.nodes: if isinstance(decorator_expr, new.Name): if decorator_expr.name in ('classmethod', 'staticmethod'): - newnode.type = decorator_expr.name + newnode._type = decorator_expr.name elif decorator_expr.name == 'classproperty': - newnode.type = 'classmethod' + newnode._type = 'classmethod' frame.set_local(newnode.name, newnode) return newnode diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py index 20bb664f..889baa0e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py +++ b/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py @@ -31,13 +31,13 @@ from cStringIO import StringIO as BytesIO from logilab.common.compat import builtins -from logilab.common.decorators import cached +from logilab.common.decorators import cached, cachedproperty from astroid.exceptions import NotFoundError, \ AstroidBuildingException, InferenceError from astroid.node_classes import Const, DelName, DelAttr, \ Dict, From, List, Pass, Raise, Return, Tuple, Yield, YieldFrom, \ - LookupMixIn, const_factory as cf, unpack_infer + LookupMixIn, const_factory as cf, unpack_infer, Name from astroid.bases import NodeNG, InferenceContext, Instance,\ YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \ BUILTINS @@ -476,6 +476,34 @@ class ListComp(_ListComp): """class representing a ListComp node""" # Function ################################################################### + +def _function_type(self): + """ + Function type, possible values are: + method, function, staticmethod, classmethod. + """ + # Can't infer that this node is decorated + # with a subclass of `classmethod` where `type` is first set, + # so do it here. 
+ if self.decorators: + for node in self.decorators.nodes: + if not isinstance(node, Name): + continue + try: + for infered in node.infer(): + if not isinstance(infered, Class): + continue + for ancestor in infered.ancestors(): + if isinstance(ancestor, Class): + if (ancestor.name == 'classmethod' and + ancestor.root().name == BUILTINS): + return 'classmethod' + elif (ancestor.name == 'staticmethod' and + ancestor.root().name == BUILTINS): + return 'staticmethod' + except InferenceError: + pass + return self._type class Lambda(LocalsDictNodeNG, FilterStmtsMixin): @@ -539,6 +567,8 @@ class Function(Statement, Lambda): # attributes below are set by the builder module or by raw factories blockstart_tolineno = None decorators = None + _type = "function" + type = cachedproperty(_function_type) def __init__(self, name, doc): self.locals = {} diff --git a/pymode/libs/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py index 60634022..411ba31d 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/main.py +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py @@ -1,6 +1,7 @@ """ Pylint support. """ from os import path as op, environ import sys +import logging from pylama.lint import Linter as BaseLinter @@ -11,7 +12,11 @@ from pylint.lint import Run from pylint.reporters import BaseReporter -PYLINT_RC = op.abspath(op.join(CURDIR, 'pylint.rc')) +HOME_RCFILE = op.abspath(op.join(environ.get('HOME', ''), '.pylintrc')) +LAMA_RCFILE = op.abspath(op.join(CURDIR, 'pylint.rc')) + + +logger = logging.getLogger('pylama') class Linter(BaseLinter): @@ -19,12 +24,13 @@ class Linter(BaseLinter): """ Check code with pylint. """ @staticmethod - def run(path, **meta): # noqa + def run(path, code, params=None, ignore=None, select=None, **meta): """ Pylint code checking. :return list: List of errors. 
""" + logger.debug('Start pylint') MANAGER.astroid_cache.clear() @@ -46,12 +52,62 @@ def add_message(self, msg_id, location, msg): type=msg_id[0] )) - pylintrc = op.join(environ.get('HOME', ''), '.pylintrc') - defattrs = '-r n' - if not op.exists(pylintrc): - defattrs += ' --rcfile={0}'.format(PYLINT_RC) - attrs = meta.get('pylint', defattrs.split()) + params = _Params(ignore=ignore, select=select, params=params) + logger.debug(params) runner = Run( - [path] + attrs, reporter=Reporter(), exit=False) + [path] + params.to_attrs(), reporter=Reporter(), exit=False) + return runner.linter.reporter.errors + + +class _Params(object): + + """ Store pylint params. """ + + def __init__(self, select=None, ignore=None, params=None): + + params = dict(params.items()) + rcfile = params.get('rcfile', LAMA_RCFILE) + enable = params.get('enable', None) + disable = params.get('disable', None) + + if op.exists(HOME_RCFILE): + rcfile = HOME_RCFILE + + if select: + enable = select | set(enable.split(",") if enable else []) + + if ignore: + disable = ignore | set(disable.split(",") if disable else []) + + params.update(dict( + report=params.get('report', False), rcfile=rcfile, + enable=enable, disable=disable)) + + self.params = dict( + (name.replace('_', '-'), self.prepare_value(value)) + for name, value in params.items() if value is not None) + + @staticmethod + def prepare_value(value): + """ Prepare value to pylint. """ + if isinstance(value, (list, tuple, set)): + return ",".join(value) + + if isinstance(value, bool): + return "y" if value else "n" + + return str(value) + + def to_attrs(self): + """ Convert to argument list. 
""" + return ["--%s=%s" % item for item in self.params.items()] + + def __str__(self): + return " ".join(self.to_attrs()) + + def __repr__(self): + return "" % self + +# pylama:ignore=W0403 diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py index 84f92eaf..7e0f3fca 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py @@ -153,6 +153,8 @@ def visit_raise(self, node): except astroid.InferenceError: pass else: + if cause is YES: + return if isinstance(cause, astroid.Const): if cause.value is not None: self.add_message('bad-exception-context', diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py index 79774def..25f7612e 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py +++ b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py @@ -139,7 +139,7 @@ class should be ignored. A mixin class is detected if its name ends with \ 'metavar': '', 'help': 'List of module names for which member attributes \ should not be checked (useful for modules/projects where namespaces are \ -manipulated during runtime and thus extisting member attributes cannot be \ +manipulated during runtime and thus existing member attributes cannot be \ deduced by static analysis'}, ), ('ignored-classes', From 00c6c75bb2624dfc08c1fc57ad91abb9f3b2fdf8 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 16 Jun 2014 19:00:58 +0400 Subject: [PATCH 109/428] Full tunning with code checkers. 
Also added g:pymode_options_max_line_length --- Changelog.rst | 5 ++++ autoload/pymode/tools/loclist.vim | 1 + doc/pymode.txt | 41 ++++++++++++++++++++++++++++++- ftplugin/python/pymode.vim | 5 +++- plugin/pymode.vim | 15 +++++++++++ pymode/environment.py | 22 ++++++++++++----- pymode/lint.py | 15 ++++++++--- 7 files changed, 93 insertions(+), 11 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 6ae3e35d..ac48fccb 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -2,6 +2,11 @@ Changelog ========= * Pylama updated to version 5.0.5 +* Add 'pymode_options_max_line_length' option +* Add ability to set related checker options `:help pymode-lint-options` + Options added: 'pymode_lint_options_pep8', 'pymode_lint_options_pep257', + 'pymode_lint_options_mccabe', 'pymode_lint_options_pyflakes', + 'pymode_lint_options_pylint' ## 2014-06-11 0.8.1 ------------------- diff --git a/autoload/pymode/tools/loclist.vim b/autoload/pymode/tools/loclist.vim index 4e960fe8..3a07fa3a 100644 --- a/autoload/pymode/tools/loclist.vim +++ b/autoload/pymode/tools/loclist.vim @@ -71,6 +71,7 @@ fun! g:PymodeLocList.show() "{{{ else let num = winnr() lopen + setl nowrap execute max([min([line("$"), g:pymode_quickfix_maxheight]), g:pymode_quickfix_minheight]) . "wincmd _" if num != winnr() call setwinvar(winnr(), 'quickfix_title', self._title . ' <' . self._name . '>') diff --git a/doc/pymode.txt b/doc/pymode.txt index ae2a5d27..8f47faf0 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -22,6 +22,7 @@ CONTENTS *pymode-contents* 2.7 Run code................................................|pymode-run| 2.8 Breakpoints.....................................|pymode-breakpoints| 3. Code checking...............................................|pymode-lint| + 3.1 Code checkers options..........................|pymode-lint-options| 4. 
Rope support................................................|pymode-rope| 4.1 Code completion..................................|pymode-completion| 4.2 Find definition.................................|pymode-rope-findit| @@ -89,7 +90,7 @@ Value is list of path's strings. > let g:pymode_paths = [] -Trim unused white spaces on save *'g:pymode_trim_whitespaces'* +Trim unused white spaces on save *'g:pymode_trim_whitespaces'* > let g:pymode_trim_whitespaces = 1 @@ -97,6 +98,10 @@ Setup default python options *'g:pymode_options'* > let g:pymode_options = 1 +Setup max line length *'g:pymode_options_max_line_length'* +> + let g:pymode_options_max_line_length = 79 + If this option is set to 1, pymode will enable the following options for python buffers: > @@ -330,6 +335,40 @@ Definitions for |signs| let g:pymode_lint_info_symbol = 'II' let g:pymode_lint_pyflakes_symbol = 'FF' +------------------------------------------------------------------------------ +3.1 Set code checkers options ~ + *pymode-lint-options* + +Pymode has the ability to set code checkers options from pymode variables: + +Set PEP8 options *'g:pymode_lint_options_pep8'* +> + let g:pymode_lint_options_pep8 = + \ {'max_line_length': g:pymode_options_max_line_length}) + +See https://pep8.readthedocs.org/en/1.4.6/intro.html#configuration for more +info. + +Set Pyflakes options *'g:pymode_lint_options_pyflakes'* +> + let g:pymode_lint_options_pyflakes = { 'builtins': '_' } + +Set mccabe options *'g:pymode_lint_options_mccabe'* +> + let g:pymode_lint_options_mccabe = { 'complexity': 12 } + +Set pep257 options *'g:pymode_lint_options_pep257'* +> + let g:pymode_lint_options_pep257 = {} + +Set pylint options *'g:pymode_lint_options_pylint'* +> + let g:pymode_lint_options_pylint = + \ {'max-line-length': g:pymode_options_max_line_length}) + +See http://docs.pylint.org/features.html#options for more info. + + ============================================================================== 3. 
Rope support ~ diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index d5fe8aba..c089679d 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -56,7 +56,10 @@ if g:pymode_options setlocal number endif setlocal nowrap - setlocal textwidth=79 + exe "setlocal textwidth=" . g:pymode_options_max_line_length + if exists('+colorcolumn') + setlocal colorcolumn=+1 + endif setlocal commentstring=#%s setlocal define=^\s*\\(def\\\\|class\\) endif diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 12156402..e78ab4b1 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -47,6 +47,7 @@ call pymode#default("g:pymode_trim_whitespaces", 1) " Set recomended python options call pymode#default("g:pymode_options", 1) +call pymode#default("g:pymode_options_max_line_length", 80) " Minimal height of pymode quickfix window call pymode#default('g:pymode_quickfix_maxheight', 6) @@ -127,6 +128,20 @@ call pymode#default("g:pymode_lint_error_symbol", "EE") call pymode#default("g:pymode_lint_info_symbol", "II") call pymode#default("g:pymode_lint_pyflakes_symbol", "FF") +" Code checkers options +call pymode#default("g:pymode_lint_options_pep8", + \ {'max_line_length': g:pymode_options_max_line_length}) + +call pymode#default("g:pymode_lint_options_pylint", + \ {'max-line-length': g:pymode_options_max_line_length}) + +call pymode#default("g:pymode_lint_options_mccabe", + \ {'complexity': 12}) + +call pymode#default("g:pymode_lint_options_pep257", {}) +call pymode#default("g:pymode_lint_options_pyflakes", { 'builtins': '_' }) + + " }}} " SET/UNSET BREAKPOINTS {{{ diff --git a/pymode/environment.py b/pymode/environment.py index 39b708e9..b5b2b353 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -58,13 +58,19 @@ def lines(self): return [l.decode(self.options.get('encoding')) for l in self.curbuf] - def var(self, name, to_bool=False): + @staticmethod + def var(name, to_bool=False, silence=False): """ Get vim variable. 
:return vimobj: """ - value = vim.eval(name) + try: + value = vim.eval(name) + except vim.error: + if silence: + return None + raise if to_bool: try: @@ -73,7 +79,8 @@ def var(self, name, to_bool=False): value = value return value - def message(self, msg, history=False): + @staticmethod + def message(msg, history=False): """ Show message to user. :return: :None @@ -139,7 +146,8 @@ def user_input_choices(self, msg, *options): self.error('Invalid option: %s' % input_str) return self.user_input_choices(msg, *options) - def error(self, msg): + @staticmethod + def error(msg): """ Show error to user. """ vim.command('call pymode#error("%s")' % str(msg)) @@ -218,7 +226,8 @@ def get_offset_params(self, cursor=None, base=""): env.debug('Get offset', base or None, row, col, offset) return source, offset - def goto_line(self, line): + @staticmethod + def goto_line(line): """ Go to line. """ vim.command('normal %sggzz' % line) @@ -228,7 +237,8 @@ def goto_file(self, path, cmd='e', force=False): self.debug('read', path) vim.command("%s %s" % (cmd, path)) - def goto_buffer(self, bufnr): + @staticmethod + def goto_buffer(bufnr): """ Open buffer. 
""" if str(bufnr) != '-1': vim.command('buffer %s' % bufnr) diff --git a/pymode/lint.py b/pymode/lint.py index ab210a90..5053f7fa 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -20,12 +20,21 @@ def code_check(): if not env.curbuf.name: return env.stop() + linters = env.var('g:pymode_lint_checkers') + env.debug(linters) + options = parse_options( + linters=linters, force=1, ignore=env.var('g:pymode_lint_ignore'), select=env.var('g:pymode_lint_select'), - linters=env.var('g:pymode_lint_checkers'), - force=1, ) + + for linter in linters: + opts = env.var('g:pymode_lint_options_%s' % linter, silence=True) + if opts: + options.linters_params[linter] = options.linters_params.get(linter, {}) + options.linters_params[linter].update(opts) + env.debug(options) path = os.path.relpath(env.curbuf.name, env.curdir) @@ -61,4 +70,4 @@ def __sort(e): env.run('g:PymodeLocList.current().extend', [e._info for e in errors]) -# pylama:ignore=W0212 +# pylama:ignore=W0212,E1103 From 18370c773fa00395d7676bc820f193c2426a6609 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 17 Jun 2014 17:07:32 +0400 Subject: [PATCH 110/428] Fix undefined b:pymode_modified variable --- ftplugin/python/pymode.vim | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index c089679d..72c963ca 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -2,6 +2,8 @@ if !g:pymode || pymode#default('b:pymode', 1) finish endif +let b:pymode_modified = &modified + " Init paths if !pymode#default('g:pymode_init', 1) call pymode#init(expand(':p:h:h:h'), g:pymode_paths) From 6fb256890cb4d386983551167d7e3faa60f29f03 Mon Sep 17 00:00:00 2001 From: Daniel Hahler Date: Wed, 25 Jun 2014 13:07:27 +0200 Subject: [PATCH 111/428] Minor fixes to indent documentation --- autoload/pymode/indent.vim | 2 +- doc/pymode.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/autoload/pymode/indent.vim b/autoload/pymode/indent.vim index 
9bbe0143..d8e9f148 100644 --- a/autoload/pymode/indent.vim +++ b/autoload/pymode/indent.vim @@ -4,7 +4,7 @@ " Prev Maintainer: Eric Mc Sween (address invalid) " Original Author: David Bustos (address invalid) " Last Change: 2012-06-21 -" License: Public Domainlet +" License: Public Domain function! pymode#indent#get_indent(lnum) diff --git a/doc/pymode.txt b/doc/pymode.txt index 8f47faf0..0200c1aa 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -146,7 +146,7 @@ Set value to `python3` if you are working with python3 projects. You could use Pymode supports PEP8-compatible python indent. Enable pymode indentation *'g:pymode_indent'* > - let g:pymode_indent = [] + let g:pymode_indent = 1 ------------------------------------------------------------------------------ 2.3 Python folding ~ From 71c529f8d142381154b03bd09ebe1dc11cc5a5ec Mon Sep 17 00:00:00 2001 From: Daniel Hahler Date: Wed, 25 Jun 2014 13:21:07 +0200 Subject: [PATCH 112/428] indent: look at and set b:did_indent Fixes https://github.com/klen/python-mode/issues/424 --- after/indent/python.vim | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/after/indent/python.vim b/after/indent/python.vim index df0f1004..3b1a620e 100644 --- a/after/indent/python.vim +++ b/after/indent/python.vim @@ -1,7 +1,8 @@ -if !g:pymode || !g:pymode_indent +if !g:pymode || !g:pymode_indent || exists("b:did_indent") finish endif +let b:did_indent = 1 setlocal nolisp setlocal tabstop=4 From a62f8ab7d7ee70717403e3556040f948160ff04b Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 7 Jul 2014 13:20:05 +0400 Subject: [PATCH 113/428] Repair indent back --- after/indent/python.vim | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/after/indent/python.vim b/after/indent/python.vim index 3b1a620e..98399b40 100644 --- a/after/indent/python.vim +++ b/after/indent/python.vim @@ -1,9 +1,7 @@ -if !g:pymode || !g:pymode_indent || exists("b:did_indent") +if !g:pymode || !g:pymode_indent finish endif -let 
b:did_indent = 1 - setlocal nolisp setlocal tabstop=4 setlocal softtabstop=4 From 1e95b8b413577e8e0fc4691a66c45f160946a4ab Mon Sep 17 00:00:00 2001 From: Kurtis Rader Date: Wed, 9 Jul 2014 20:55:22 -0700 Subject: [PATCH 114/428] Fix issue #447. --- pymode/libs/pylama/lint/pylama_pep8/pep8.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pymode/libs/pylama/lint/pylama_pep8/pep8.py b/pymode/libs/pylama/lint/pylama_pep8/pep8.py index b31a9781..10a3a155 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/pep8.py +++ b/pymode/libs/pylama/lint/pylama_pep8/pep8.py @@ -199,6 +199,7 @@ def maximum_line_length(physical_line, max_line_length, multiline): Reports error E501. """ + max_line_length = int(max_line_length) line = physical_line.rstrip() length = len(line) if length > max_line_length and not noqa(line): From 559240f26a18c934c717c0472a2e9233812fcadc Mon Sep 17 00:00:00 2001 From: Kurtis Rader Date: Thu, 10 Jul 2014 19:09:41 -0700 Subject: [PATCH 115/428] Fix mismatched type exception thrown by mccabe.py. This bug is a variation of the same one reported in issue #447. 
Commit 00c6c75b on 2014-06-16 introduced this statement in plugin/pymode.vim: call pymode#default("g:pymode_lint_options_mccabe", \ {'complexity': 12}) That results in pymode/libs/pylama/lint/pylama_mccabe/mccabe.py throwing this exception: Run mccabe {'complexity': '12'} Traceback (most recent call last): File "/Users/krader/Dropbox/dotfiles/vim/bundle/python-mode/pymode/libs/pylama/core.py", line 68, in run select=params.get("select", set()), params=lparams): File "/Users/krader/Dropbox/dotfiles/vim/bundle/python-mode/pymode/libs/pylama/lint/pylama_mccabe/__init__.py", line 20, in run return get_code_complexity(code, complexity, filename=path) or [] File "/Users/krader/Dropbox/dotfiles/vim/bundle/python-mode/pymode/libs/pylama/lint/pylama_mccabe/mccabe.py", line 267, in get_code_complexity for lineno, offset, text, check in McCabeChecker(tree, filename).run(): File "/Users/krader/Dropbox/dotfiles/vim/bundle/python-mode/pymode/libs/pylama/lint/pylama_mccabe/mccabe.py", line 247, in run if self.max_complexity < 0: TypeError: unorderable types: str() < int() --- pymode/libs/pylama/lint/pylama_mccabe/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py index 65f3b0b3..da8b5f2a 100644 --- a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py +++ b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py @@ -16,5 +16,5 @@ def run(path, code=None, params=None, **meta): """ from .mccabe import get_code_complexity - complexity = params.get('complexity', 10) + complexity = int(params.get('complexity', 10)) return get_code_complexity(code, complexity, filename=path) or [] From 58bc3845d88682d26984c4b5c5b2fdc3c21d2063 Mon Sep 17 00:00:00 2001 From: Wayne Ye Date: Fri, 11 Jul 2014 17:59:22 +0800 Subject: [PATCH 116/428] Revise readme to guide users customize key bindings --- README.rst | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) 
diff --git a/README.rst b/README.rst index 3f2e050c..249b0ad2 100644 --- a/README.rst +++ b/README.rst @@ -107,7 +107,7 @@ Install with commands: apt-get update apt-get install vim-python-mode -If you are getting the message: "The following signatures couldn' be verified because the public key is not available": :: +If you are getting the message: "The following signatures couldn't be verified because the public key is not available": :: apt-key adv --keyserver keyserver.ubuntu.com --recv-keys B5DF65307000E266 @@ -126,7 +126,7 @@ If your python-mode doesn't work: vim -u /debug.vim -And try to repeat your case. If no error occurs, seems like problem isnt in the +And try to repeat your case. If no error occurs, seems like problem isn't in the plugin. 2. Type `:PymodeTroubleshooting` @@ -136,6 +136,21 @@ creating a `new github issue `_ if one does not already exist for the problem). +Customization +============= + +You can override the default key bindings by redefining them in your `.vimrc`, for example: :: + + " Override go-to.definition key shortcut to Ctrl-] + let g:pymode_rope_goto_definition_bind = "" + + " Override run current python file key shortcut to Ctrl-Shift-e + let g:pymode_run_bind = "" + + " Override view python doc key shortcut to Ctrl-Shift-d + let g:pymode_doc_bind = "" + + Documentation ============= From 2d325c57d7047341c8b4830dd1c091fb51adfbe3 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 12 Jul 2014 22:22:17 +0400 Subject: [PATCH 117/428] Update contributors --- AUTHORS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index e1abde9b..376c2e41 100644 --- a/AUTHORS +++ b/AUTHORS @@ -23,6 +23,7 @@ Contributors: * Jay Rainey (https://github.com/jawrainey) * Jonathan McCall (http://github.com/Jonnymcc); * Kevin Deldycke (http://github.com/kdeldycke); +* Kurtis Rader (https://github.com/krader1961) * Lowe Thiderman (http://github.com/thiderman); * Martin Brochhaus (http://github.com/mbrochh); * Matthew Moses 
(http://github.com/mlmoses); @@ -36,6 +37,7 @@ Contributors: * Ronald Andreu Kaiser (http://github.com/cathoderay); * Sorin Ionescu (sorin-ionescu); * Steve Losh (http://github.com/sjl); +* Wayne Ye (https://github.com/WayneYe) * bendavis78 (http://github.com/bendavis78) * fwuzju (http://github.com/fwuzju) * lawrenceakka; From b2de6b0dbfe571443f679628958c899f6af95f16 Mon Sep 17 00:00:00 2001 From: Dennis Brakhane Date: Fri, 25 Jul 2014 14:17:43 +0200 Subject: [PATCH 118/428] add class hierarchy support to rename When a method is renamed, the user can choose "perform in class hierarchy". This will rename methods of the same name in super- and subclasses. For example, in the following class A(object): def foo(self): pass class A2(A): def foo(self): pass class B(object): def foo(self): pass A normal rename in A2's foo will not rename A's foo and vice versa. With the "in class hierarchy" mode, both will be renamed. In both cases, class B will not be altered, as it isn't a super- or subclass. 
Fixes #456 --- pymode/rope.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index 9888db71..f4190943 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -465,15 +465,19 @@ def run(self): if not input_str: return False - changes = self.get_changes(refactor, input_str) - action = env.user_input_choices( - 'Choose what to do:', 'perform', 'preview') + 'Choose what to do:', 'perform', 'preview', + 'perform in class hierarchy', + 'preview in class hierarchy') + + in_hierarchy = action.endswith("in class hierarchy") + + changes = self.get_changes(refactor, input_str, in_hierarchy) if not action: return False - if action == 'preview': + if action.startswith('preview'): print("\n ") print("-------------------------------") print("\n%s\n" % changes.get_description()) @@ -505,7 +509,7 @@ def get_input_str(refactor, ctx): return True @staticmethod - def get_changes(refactor, input_str): + def get_changes(refactor, input_str, in_hierarchy=False): """ Get changes. :return Changes: @@ -513,7 +517,7 @@ def get_changes(refactor, input_str): """ progress = ProgressHandler('Calculate changes ...') return refactor.get_changes( - input_str, task_handle=progress.handle) + input_str, task_handle=progress.handle, in_hierarchy = in_hierarchy) class RenameRefactoring(Refactoring): From 2966064a01d84c67092e2f490fcc5a1d63de9a50 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Sun, 27 Jul 2014 13:50:25 +0200 Subject: [PATCH 119/428] Fixed minor typo --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 0200c1aa..9f7fca16 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -74,7 +74,7 @@ Features: *pymode-features* This script provides the following options that can customizes the behavior of PythonMode. These options should be set in your |vimrc|. - Bellow shows the default values. + Below shows the default values. 
Turn on the whole plugin *'g:pymode'* From 0800b3cd5b72b09c109f162418e9fd1192a251c0 Mon Sep 17 00:00:00 2001 From: Marton Suranyi Date: Wed, 30 Jul 2014 16:21:30 +0200 Subject: [PATCH 120/428] let autopep8 to use g:pymode_options_max_line_length --- pymode/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/__init__.py b/pymode/__init__.py index 37691abb..d5e63ba3 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -18,7 +18,7 @@ class Options(object): in_place = True indent_size = int(vim.eval('&tabstop')) line_range = None - max_line_length = 79 + max_line_length = int(vim.eval('g:pymode_options_max_line_length')) pep8_passes = 100 recursive = False select = vim.eval('g:pymode_lint_select') From c0c9aea870059dd660304e103b3ded46be3cb1e0 Mon Sep 17 00:00:00 2001 From: Tommy Allen Date: Sun, 3 Aug 2014 15:16:28 -0400 Subject: [PATCH 121/428] RopeGotoDefinition works with filenames that contain spaces. --- pymode/environment.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pymode/environment.py b/pymode/environment.py index b5b2b353..43246cea 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -235,6 +235,8 @@ def goto_file(self, path, cmd='e', force=False): """ Function description. """ if force or os.path.abspath(path) != self.curbuf.name: self.debug('read', path) + if ' ' in path and os.name == 'posix': + path = path.replace(' ', '\\ ') vim.command("%s %s" % (cmd, path)) @staticmethod From 9c9fa8a1114e3f44d601f4402d6e7c2d5ee3f5e5 Mon Sep 17 00:00:00 2001 From: Pascal Date: Mon, 7 Jul 2014 11:29:36 -0400 Subject: [PATCH 122/428] Fix Vim hanging on buffer write (any filetype!) on Windows --- plugin/pymode.vim | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/plugin/pymode.vim b/plugin/pymode.vim index e78ab4b1..410b9dd5 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,4 +1,4 @@ -" vi: fdl=1 +" vi: fdl=1 let g:pymode_version = "0.8.1" com! 
PymodeVersion echomsg "Current python-mode version: " . g:pymode_version @@ -170,7 +170,7 @@ call pymode#default('g:pymode_rope_current', '') call pymode#default('g:pymode_rope_project_root', '') " Configurable rope project folder (always relative to project root) -call pymode#default('g:pymode_rope_ropefolder', '.ropeproject') +call pymode#default('g:pymode_rope_ropefolder', '.ropeproject') " If project hasnt been finded in current working directory, look at parents directory call pymode#default('g:pymode_rope_lookup_project', 0) @@ -262,10 +262,6 @@ if &compatible endif filetype plugin on -if exists('+shellslash') - set shellslash -endif - " Disable python-related functionality " let g:pymode_python = 'disable' " let g:pymode_python = 'python3' From 96cbc8dbcee8b2168f000ece920d398217ff28d6 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 5 Aug 2014 23:08:43 +0100 Subject: [PATCH 123/428] Update AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 376c2e41..c5c900b6 100644 --- a/AUTHORS +++ b/AUTHORS @@ -37,6 +37,7 @@ Contributors: * Ronald Andreu Kaiser (http://github.com/cathoderay); * Sorin Ionescu (sorin-ionescu); * Steve Losh (http://github.com/sjl); +* Tommy Allen (https://github.com/tweekmonster) * Wayne Ye (https://github.com/WayneYe) * bendavis78 (http://github.com/bendavis78) * fwuzju (http://github.com/fwuzju) From 1ca4274341196b0da1e26db9e274882cc7e79570 Mon Sep 17 00:00:00 2001 From: dannon Date: Thu, 4 Sep 2014 13:24:18 -0400 Subject: [PATCH 124/428] Add option for disabling colorcolumn. 
--- doc/pymode.txt | 4 ++++ ftplugin/python/pymode.vim | 2 +- plugin/pymode.vim | 1 + 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 9f7fca16..5cec03f9 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -115,6 +115,10 @@ python buffers: > setlocal commentstring=#%s setlocal define=^\s*\\(def\\\\|class\\) +Enable colorcolumn display at max_line_length *'g:pymode_options_colorcolumn'* +> + let g:pymode_options_colorcolumn = 1 + Setup pymode |quickfix| window *'g:pymode_quickfix_maxheight'* *'g:pymode_quickfix_minheight'* diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index 72c963ca..3a19d286 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -59,7 +59,7 @@ if g:pymode_options endif setlocal nowrap exe "setlocal textwidth=" . g:pymode_options_max_line_length - if exists('+colorcolumn') + if g:pymode_options_colorcolumn && exists('+colorcolumn') setlocal colorcolumn=+1 endif setlocal commentstring=#%s diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 410b9dd5..d63fa59b 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -48,6 +48,7 @@ call pymode#default("g:pymode_trim_whitespaces", 1) " Set recomended python options call pymode#default("g:pymode_options", 1) call pymode#default("g:pymode_options_max_line_length", 80) +call pymode#default("g:pymode_options_colorcolumn", 1) " Minimal height of pymode quickfix window call pymode#default('g:pymode_quickfix_maxheight', 6) From 88dbf84c3e5f01dc9352240a75aa3c31438604cd Mon Sep 17 00:00:00 2001 From: Brendan Maguire Date: Fri, 10 Oct 2014 16:32:08 +0100 Subject: [PATCH 125/428] Fixed some issues around the auto_import_cache * fixes #361 * pymode_rope_autoimport_modules was never coming into effect during RopeContext __init__ * rope.regenerate now generates the cache for the entire project instead of just one file * StringIO was been imported as a class, but been used as a module --- pymode/rope.py | 8 ++++---- 1 
file changed, 4 insertions(+), 4 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index f4190943..01e153e4 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -221,7 +221,7 @@ def regenerate(): """ Clear cache. """ with RopeContext() as ctx: ctx.project.pycore._invalidate_resource_cache(ctx.resource) # noqa - ctx.importer.generate_cache(resources=[ctx.resource]) + ctx.importer.generate_cache() ctx.project.sync() @@ -372,7 +372,7 @@ def __init__(self, path, project_path): if os.path.exists("%s/__init__.py" % project_path): sys.path.append(project_path) - if self.options.get('autoimport') == '1': + if self.options.get('autoimport'): self.generate_autoimport_cache() env.debug('Context init', project_path) @@ -409,8 +409,8 @@ def _update_cache(importer, modules=None): importer.generate_modules_cache(modules) importer.project.sync() - sys.stdout, stdout_ = StringIO.StringIO(), sys.stdout - sys.stderr, stderr_ = StringIO.StringIO(), sys.stderr + sys.stdout, stdout_ = StringIO(), sys.stdout + sys.stderr, stderr_ = StringIO(), sys.stderr process = multiprocessing.Process(target=_update_cache, args=( self.importer, modules)) process.start() From 7c8b25aa39e2b7fbbb4d379d08ab5198f5c6c718 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 16 Oct 2014 15:59:27 +0400 Subject: [PATCH 126/428] Update AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index c5c900b6..2eec4764 100644 --- a/AUTHORS +++ b/AUTHORS @@ -12,6 +12,7 @@ Contributors: * Benjamin Ruston (http://github.com/bruston); * Boris Filippov (http://github.com/frenzykryger); * Brad Mease (http://github.com/bmease) +* Brendan Maguire (https://github.com/brendanmaguire) * Daniel Hahler (http://github.com/blueyed) * David Vogt (http://github.com/winged); * Denis Kasak (http://github.com/dkasak); From 154ec68136d57c2c883ac0948ca98f90077d5676 Mon Sep 17 00:00:00 2001 From: Yongzhi Pan Date: Wed, 22 Oct 2014 13:09:49 +0800 Subject: [PATCH 127/428] Fix typo. 
--- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 249b0ad2..b6f9bb69 100644 --- a/README.rst +++ b/README.rst @@ -56,7 +56,7 @@ Requirements How to install ============== -Using pathogen (recomended) +Using pathogen (recommended) ---------------------------- :: From fce811d0467fad1d70eccec9694fe3d0a312faf3 Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Sun, 16 Nov 2014 21:37:51 -0700 Subject: [PATCH 128/428] Add folding support for multi-line decorators --- autoload/pymode/folding.vim | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 78cf8486..8844bc80 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -15,9 +15,12 @@ endif fun! pymode#folding#text() " {{{ let fs = v:foldstart - while getline(fs) =~ '\%(^\s*@\)\|\%(^\s*\%("""\|''''''\)\s*$\)' + while getline(fs) !~ s:def_regex && getline(fs) !~ s:doc_begin_regex let fs = nextnonblank(fs + 1) endwhile + if getline(fs) =~ s:doc_begin_regex + let fs = nextnonblank(fs + 1) + endif let line = getline(fs) let nucolwidth = &fdc + &number * &numberwidth @@ -41,8 +44,24 @@ fun! 
pymode#folding#expr(lnum) "{{{ let indent = indent(a:lnum) let prev_line = getline(a:lnum - 1) - if line =~ s:def_regex || line =~ s:decorator_regex - if prev_line =~ s:decorator_regex + if line =~ s:decorator_regex + return ">".(indent / &shiftwidth + 1) + endif + + if line =~ s:def_regex + " Check if last decorator is before the last def + let decorated = 0 + let lnum = a:lnum - 1 + while lnum > 0 + if getline(lnum) =~ s:def_regex + break + elseif getline(lnum) =~ s:decorator_regex + let decorated = 1 + break + endif + let lnum -= 1 + endwhile + if decorated return '=' else return ">".(indent / &shiftwidth + 1) From d113fe0a30302a450db0fe04e775025d39145e8f Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Mon, 17 Nov 2014 14:01:49 -0700 Subject: [PATCH 129/428] Handle folding of nested defs correctly --- autoload/pymode/folding.vim | 15 +++++++++++++++ autoload/pymode/motion.vim | 10 +++++----- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 78cf8486..c7d7f769 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -57,6 +57,21 @@ fun! 
pymode#folding#expr(lnum) "{{{ return "<".(indent / &shiftwidth + 1) endif + " Handle nested defs + let last_def = pymode#motion#BlockStart(a:lnum, s:def_regex) + if getline(last_def) =~ s:def_regex + let last_def_end = pymode#motion#BlockEnd(last_def) + if last_def_end < line('$') + let nested = getline(pymode#motion#BlockStart(last_def - 1)) =~ s:def_regex + if nested && getline(nextnonblank(a:lnum)) !~ s:def_regex + let fold_end = min([prevnonblank(last_def_end - 1) + 2, last_def_end]) + if a:lnum == fold_end + return 's1' + endif + endif + endif + endif + if line =~ s:blank_regex if prev_line =~ s:blank_regex if indent(a:lnum + 1) == 0 && getline(a:lnum + 1) !~ s:blank_regex diff --git a/autoload/pymode/motion.vim b/autoload/pymode/motion.vim index 67e99e6b..1ddfa491 100644 --- a/autoload/pymode/motion.vim +++ b/autoload/pymode/motion.vim @@ -32,15 +32,15 @@ endfunction "}}} fun! pymode#motion#select(pattern, inner) "{{{ let cnt = v:count1 - 1 let orig = getpos('.')[1:2] - let snum = s:BlockStart(orig[0], a:pattern) + let snum = pymode#motion#BlockStart(orig[0], a:pattern) if getline(snum) !~ a:pattern return 0 endif - let enum = s:BlockEnd(snum, indent(snum)) + let enum = pymode#motion#BlockEnd(snum, indent(snum)) while cnt let lnum = search(a:pattern, 'nW') if lnum - let enum = s:BlockEnd(lnum, indent(lnum)) + let enum = pymode#motion#BlockEnd(lnum, indent(lnum)) call cursor(enum, 1) endif let cnt = cnt - 1 @@ -58,7 +58,7 @@ fun! pymode#motion#select(pattern, inner) "{{{ endfunction "}}} -fun! s:BlockStart(lnum, ...) "{{{ +fun! pymode#motion#BlockStart(lnum, ...) "{{{ let pattern = a:0 ? a:1 : '^\s*\(@\|class\s.*:\|def\s\)' let lnum = a:lnum + 1 let indent = 100 @@ -82,7 +82,7 @@ fun! s:BlockStart(lnum, ...) "{{{ endfunction "}}} -fun! s:BlockEnd(lnum, ...) "{{{ +fun! pymode#motion#BlockEnd(lnum, ...) "{{{ let indent = a:0 ? 
a:1 : indent(a:lnum) let lnum = a:lnum while lnum From 906eff560e0ab1ce037b6f7088ffb23aa9a4959a Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Mon, 17 Nov 2014 18:16:17 -0700 Subject: [PATCH 130/428] Make folding nested defs much faster --- autoload/pymode/folding.vim | 57 ++++++++++++++++++++++++++++++------- autoload/pymode/motion.vim | 10 +++---- 2 files changed, 52 insertions(+), 15 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index c7d7f769..03fbe695 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -58,18 +58,43 @@ fun! pymode#folding#expr(lnum) "{{{ endif " Handle nested defs - let last_def = pymode#motion#BlockStart(a:lnum, s:def_regex) - if getline(last_def) =~ s:def_regex - let last_def_end = pymode#motion#BlockEnd(last_def) - if last_def_end < line('$') - let nested = getline(pymode#motion#BlockStart(last_def - 1)) =~ s:def_regex - if nested && getline(nextnonblank(a:lnum)) !~ s:def_regex - let fold_end = min([prevnonblank(last_def_end - 1) + 2, last_def_end]) - if a:lnum == fold_end - return 's1' + if indent(prevnonblank(a:lnum)) + let curpos = getcurpos() + try + let last_block = s:BlockStart(a:lnum) + let last_block_indent = indent(last_block) + + " Check if last class/def is not indented and therefore can't be + " nested and make sure it is a class/def block instead of a zero + " indented regular statement + if last_block_indent && getline(last_block) =~ s:def_regex + " Note: This relies on the cursor position being set by s:BlockStart + let next_def = searchpos('^\s*def \w', 'nW')[0] + let next_def_indent = next_def ? indent(next_def) : -1 + let last_block_end = s:BlockEnd(last_block) + + " If the next def has the same or greater indent than the + " previous def, it is either nested at the same level or + " nested one level deeper, and in either case will have its + " own fold. 
If the class/def containing the current line is on + " the first line it can't be nested, and if the this block + " ends on the last line, it contains no trailing code that + " should not be folded. Otherwise, we know the current line + " is at the end of a nested def. + if next_def_indent < last_block_indent && last_block > 1 && last_block_end < line('$') + + " Include up to one blank line in the fold + let fold_end = min([prevnonblank(last_block_end - 1) + 1, last_block_end]) + if a:lnum == fold_end + return 's1' + else + return '=' + endif endif endif - endif + finally + call setpos('.', curpos) + endtry endif if line =~ s:blank_regex @@ -91,5 +116,17 @@ fun! pymode#folding#expr(lnum) "{{{ endfunction "}}} +fun! s:BlockStart(lnum) "{{{ + " Note: Make sure to reset cursor position after using this function. + call cursor(a:lnum, 0) + let max_indent = max([indent(prevnonblank(a:lnum)) - &shiftwidth, 0]) + return searchpos('\v^(\s{,'.max_indent.'}(def |class |\@)\w|[^ \t#])', 'bcnW')[0] +endfunction "}}} + +fun! s:BlockEnd(lnum) "{{{ + " Note: Make sure to reset cursor position after using this function. + call cursor(a:lnum, 0) + return searchpos('\v^\s{,'.indent('.').'}\S', 'nW')[0] - 1 +endfunction "}}} " vim: fdm=marker:fdl=0 diff --git a/autoload/pymode/motion.vim b/autoload/pymode/motion.vim index 1ddfa491..67e99e6b 100644 --- a/autoload/pymode/motion.vim +++ b/autoload/pymode/motion.vim @@ -32,15 +32,15 @@ endfunction "}}} fun! 
pymode#motion#select(pattern, inner) "{{{ let cnt = v:count1 - 1 let orig = getpos('.')[1:2] - let snum = pymode#motion#BlockStart(orig[0], a:pattern) + let snum = s:BlockStart(orig[0], a:pattern) if getline(snum) !~ a:pattern return 0 endif - let enum = pymode#motion#BlockEnd(snum, indent(snum)) + let enum = s:BlockEnd(snum, indent(snum)) while cnt let lnum = search(a:pattern, 'nW') if lnum - let enum = pymode#motion#BlockEnd(lnum, indent(lnum)) + let enum = s:BlockEnd(lnum, indent(lnum)) call cursor(enum, 1) endif let cnt = cnt - 1 @@ -58,7 +58,7 @@ fun! pymode#motion#select(pattern, inner) "{{{ endfunction "}}} -fun! pymode#motion#BlockStart(lnum, ...) "{{{ +fun! s:BlockStart(lnum, ...) "{{{ let pattern = a:0 ? a:1 : '^\s*\(@\|class\s.*:\|def\s\)' let lnum = a:lnum + 1 let indent = 100 @@ -82,7 +82,7 @@ fun! pymode#motion#BlockStart(lnum, ...) "{{{ endfunction "}}} -fun! pymode#motion#BlockEnd(lnum, ...) "{{{ +fun! s:BlockEnd(lnum, ...) "{{{ let indent = a:0 ? a:1 : indent(a:lnum) let lnum = a:lnum while lnum From 391ece061adea41215ed662e6954b9e1c04722a6 Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Mon, 17 Nov 2014 20:14:27 -0700 Subject: [PATCH 131/428] Fix edge case bug at end of top-level fold --- autoload/pymode/folding.vim | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 03fbe695..9a232960 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -79,14 +79,18 @@ fun! pymode#folding#expr(lnum) "{{{ " own fold. If the class/def containing the current line is on " the first line it can't be nested, and if the this block " ends on the last line, it contains no trailing code that - " should not be folded. Otherwise, we know the current line - " is at the end of a nested def. + " should not be folded. 
Finally, if the next non-blank line + " after the end of the previous def is less indented than the + " previous def, it is not part of the same fold as that def. + " Otherwise, we know the current line is at the end of a + " nested def. if next_def_indent < last_block_indent && last_block > 1 && last_block_end < line('$') + \ && indent(nextnonblank(last_block_end)) >= last_block_indent " Include up to one blank line in the fold let fold_end = min([prevnonblank(last_block_end - 1) + 1, last_block_end]) if a:lnum == fold_end - return 's1' + return next_def ? 's1' : 0 else return '=' endif From 5304abef6bf96e4cf107ac48f3bfaead006f8b52 Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Tue, 18 Nov 2014 11:30:53 -0700 Subject: [PATCH 132/428] Fix more corner cases - Fix folding of code between two defs nested under the same def - Don't search for lines other than def or class in BlockStart because only def and class can contain nested folds - Fix pattern used to find next_def - Fix case where last_block_end is not followed by a blank line --- autoload/pymode/folding.vim | 39 +++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 9a232960..8434ef90 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -65,32 +65,33 @@ fun! pymode#folding#expr(lnum) "{{{ let last_block_indent = indent(last_block) " Check if last class/def is not indented and therefore can't be - " nested and make sure it is a class/def block instead of a zero - " indented regular statement - if last_block_indent && getline(last_block) =~ s:def_regex + " nested. + if last_block_indent " Note: This relies on the cursor position being set by s:BlockStart - let next_def = searchpos('^\s*def \w', 'nW')[0] + let next_def = searchpos(s:def_regex, 'nW')[0] let next_def_indent = next_def ? 
indent(next_def) : -1 let last_block_end = s:BlockEnd(last_block) - " If the next def has the same or greater indent than the - " previous def, it is either nested at the same level or - " nested one level deeper, and in either case will have its - " own fold. If the class/def containing the current line is on - " the first line it can't be nested, and if the this block - " ends on the last line, it contains no trailing code that - " should not be folded. Finally, if the next non-blank line - " after the end of the previous def is less indented than the - " previous def, it is not part of the same fold as that def. - " Otherwise, we know the current line is at the end of a - " nested def. - if next_def_indent < last_block_indent && last_block > 1 && last_block_end < line('$') + " If the next def has greater indent than the previous def, it + " is nested one level deeper and will have its own fold. If + " the class/def containing the current line is on the first + " line it can't be nested, and if this block ends on the last + " line, it contains no trailing code that should not be + " folded. Finally, if the next non-blank line after the end of + " the previous def is less indented than the previous def, it + " is not part of the same fold as that def. Otherwise, we know + " the current line is at the end of a nested def. + if next_def_indent <= last_block_indent && last_block > 1 && last_block_end < line('$') \ && indent(nextnonblank(last_block_end)) >= last_block_indent " Include up to one blank line in the fold - let fold_end = min([prevnonblank(last_block_end - 1) + 1, last_block_end]) + if getline(last_block_end) =~ s:blank_regex + let fold_end = min([prevnonblank(last_block_end - 1), last_block_end]) + 1 + else + let fold_end = last_block_end + endif if a:lnum == fold_end - return next_def ? 's1' : 0 + return 's1' else return '=' endif @@ -124,7 +125,7 @@ fun! s:BlockStart(lnum) "{{{ " Note: Make sure to reset cursor position after using this function. 
call cursor(a:lnum, 0) let max_indent = max([indent(prevnonblank(a:lnum)) - &shiftwidth, 0]) - return searchpos('\v^(\s{,'.max_indent.'}(def |class |\@)\w|[^ \t#])', 'bcnW')[0] + return searchpos('\v^\s{,'.max_indent.'}(def |class )\w', 'bcnW')[0] endfunction "}}} fun! s:BlockEnd(lnum) "{{{ From 09dc5efccff21189518a6ba72bd18cd5e8a70963 Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Tue, 18 Nov 2014 18:35:50 -0700 Subject: [PATCH 133/428] Add file length limit for checking for nested folds --- autoload/pymode/folding.vim | 5 +++-- plugin/pymode.vim | 2 ++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 8434ef90..83ff887e 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -57,8 +57,9 @@ fun! pymode#folding#expr(lnum) "{{{ return "<".(indent / &shiftwidth + 1) endif - " Handle nested defs - if indent(prevnonblank(a:lnum)) + " Handle nested defs but only for files shorter than + " g:pymode_folding_nest_limit lines due to performance concerns + if line('$') < g:pymode_folding_nest_limit && indent(prevnonblank(a:lnum)) let curpos = getcurpos() try let last_block = s:BlockStart(a:lnum) diff --git a/plugin/pymode.vim b/plugin/pymode.vim index d63fa59b..9bd4d95c 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -36,6 +36,8 @@ call pymode#default("g:pymode_indent", 1) " Enable/disable pymode folding for pyfiles. 
call pymode#default("g:pymode_folding", 1) +" Maximum file length to check for nested class/def statements +call pymode#default("g:pymode_folding_nest_limit", 1000) " Change for folding customization (by example enable fold for 'if', 'for') call pymode#default("g:pymode_folding_regex", '^\s*\%(class\|def\) \w\+') From f2a03ec7e9c328eb7789082e49c8b07fcd527117 Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Wed, 19 Nov 2014 12:21:27 -0700 Subject: [PATCH 134/428] Fix BlockStart when previous line is indented --- autoload/pymode/folding.vim | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 83ff887e..e5ea4082 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -68,7 +68,7 @@ fun! pymode#folding#expr(lnum) "{{{ " Check if last class/def is not indented and therefore can't be " nested. if last_block_indent - " Note: This relies on the cursor position being set by s:BlockStart + call cursor(a:lnum, 0) let next_def = searchpos(s:def_regex, 'nW')[0] let next_def_indent = next_def ? indent(next_def) : -1 let last_block_end = s:BlockEnd(last_block) @@ -125,7 +125,26 @@ endfunction "}}} fun! s:BlockStart(lnum) "{{{ " Note: Make sure to reset cursor position after using this function. call cursor(a:lnum, 0) - let max_indent = max([indent(prevnonblank(a:lnum)) - &shiftwidth, 0]) + + " In case the end of the block is indented to a higher level than the def + " statement plus one shiftwidth, we need to find the indent level at the + " bottom of that if/for/try/while/etc. block. 
+ let last_def = searchpos(s:def_regex, 'bcnW')[0] + if last_def + let last_def_indent = indent(last_def) + call cursor(last_def, 0) + let next_stmt_at_def_indent = searchpos('\v^\s{'.last_def_indent.'}[^[:space:]#]', 'nW')[0] + else + let next_stmt_at_def_indent = -1 + endif + + " Now find the class/def one shiftwidth lower than the start of the + " aforementioned indent block. + if next_stmt_at_def_indent && next_stmt_at_def_indent < a:lnum + let max_indent = max([indent(next_stmt_at_def_indent) - &shiftwidth, 0]) + else + let max_indent = max([indent(prevnonblank(a:lnum)) - &shiftwidth, 0]) + endif return searchpos('\v^\s{,'.max_indent.'}(def |class )\w', 'bcnW')[0] endfunction "}}} From 1f9bb33e98c187663e6e49533421d78753060d6d Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Thu, 27 Nov 2014 00:54:03 +0300 Subject: [PATCH 135/428] Update authors --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 2eec4764..c09fe72d 100644 --- a/AUTHORS +++ b/AUTHORS @@ -20,6 +20,7 @@ Contributors: * Florent Xicluna (http://github.com/florentx); * Fredrik Henrysson (http://github.com/fhenrysson); * Igor Guerrero (http://github.com/igorgue); +* Jacob Niehus (https://github.com/wilywampa) * Jason Harvey (http://github.com/alienth) * Jay Rainey (https://github.com/jawrainey) * Jonathan McCall (http://github.com/Jonnymcc); From dc801836a090bae84c6872af120d9dd8eacc1f01 Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Fri, 28 Nov 2014 15:28:25 -0700 Subject: [PATCH 136/428] Replace getcurpos with getpos to support older Vim --- autoload/pymode/folding.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index a7ab47e9..93f18b09 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -79,7 +79,7 @@ fun! 
pymode#folding#expr(lnum) "{{{ " Handle nested defs but only for files shorter than " g:pymode_folding_nest_limit lines due to performance concerns if line('$') < g:pymode_folding_nest_limit && indent(prevnonblank(a:lnum)) - let curpos = getcurpos() + let curpos = getpos('.') try let last_block = s:BlockStart(a:lnum) let last_block_indent = indent(last_block) From a5f70a8da50524984c63a3b1588f7a23971da9fe Mon Sep 17 00:00:00 2001 From: Daniel Sullivan Date: Tue, 16 Dec 2014 16:08:39 -0500 Subject: [PATCH 137/428] Remove call to multiprocessing. Fixes issue #422. --- pymode/rope.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index 01e153e4..159900bf 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -409,12 +409,7 @@ def _update_cache(importer, modules=None): importer.generate_modules_cache(modules) importer.project.sync() - sys.stdout, stdout_ = StringIO(), sys.stdout - sys.stderr, stderr_ = StringIO(), sys.stderr - process = multiprocessing.Process(target=_update_cache, args=( - self.importer, modules)) - process.start() - sys.stdout, sys.stderr = stdout_, stderr_ + _update_cache(self.importer, modules) class ProgressHandler(object): From e2521aa6b7d9ee8594b3b49a442e3f569b45cae1 Mon Sep 17 00:00:00 2001 From: Tyler Fenby Date: Sun, 21 Dec 2014 17:37:43 -0500 Subject: [PATCH 138/428] Upgrade included rope libs to latest releases rope: 0.10.2 rope_py3k: 0.9.4-1 --- pymode/libs2/rope/__init__.py | 2 +- pymode/libs2/rope/base/arguments.py | 2 + pymode/libs2/rope/base/builtins.py | 67 ++++-- pymode/libs2/rope/base/change.py | 12 +- pymode/libs2/rope/base/codeanalyze.py | 16 +- pymode/libs2/rope/base/default_config.py | 12 +- pymode/libs2/rope/base/evaluate.py | 16 +- pymode/libs2/rope/base/fscommands.py | 3 +- pymode/libs2/rope/base/libutils.py | 67 +++++- pymode/libs2/rope/base/oi/doa.py | 11 +- pymode/libs2/rope/base/oi/runmod.py | 19 +- pymode/libs2/rope/base/oi/soa.py | 5 +- 
pymode/libs2/rope/base/oi/soi.py | 11 + pymode/libs2/rope/base/oi/transform.py | 12 +- pymode/libs2/rope/base/prefs.py | 2 +- pymode/libs2/rope/base/project.py | 118 +++++++++- pymode/libs2/rope/base/pycore.py | 134 +++-------- pymode/libs2/rope/base/pynames.py | 14 +- pymode/libs2/rope/base/pyobjectsdef.py | 41 ++-- pymode/libs2/rope/base/pyscopes.py | 7 +- pymode/libs2/rope/base/resourceobserver.py | 3 +- pymode/libs2/rope/base/resources.py | 56 ++++- pymode/libs2/rope/base/stdmods.py | 10 +- pymode/libs2/rope/base/taskhandle.py | 2 - pymode/libs2/rope/base/utils.py | 5 + pymode/libs2/rope/base/worder.py | 21 +- pymode/libs2/rope/contrib/autoimport.py | 29 ++- pymode/libs2/rope/contrib/codeassist.py | 119 +++++++--- pymode/libs2/rope/contrib/finderrors.py | 2 +- pymode/libs2/rope/contrib/findit.py | 24 +- pymode/libs2/rope/contrib/fixmodnames.py | 4 +- pymode/libs2/rope/contrib/fixsyntax.py | 28 ++- pymode/libs2/rope/contrib/generate.py | 43 ++-- pymode/libs2/rope/refactor/__init__.py | 4 +- .../libs2/rope/refactor/change_signature.py | 50 ++-- .../libs2/rope/refactor/encapsulate_field.py | 39 ++-- pymode/libs2/rope/refactor/extract.py | 39 ++-- pymode/libs2/rope/refactor/functionutils.py | 22 +- .../rope/refactor/importutils/__init__.py | 76 ++++--- .../rope/refactor/importutils/actions.py | 92 ++++---- .../rope/refactor/importutils/importinfo.py | 16 +- .../refactor/importutils/module_imports.py | 109 ++++++--- pymode/libs2/rope/refactor/inline.py | 160 +++++++------ .../libs2/rope/refactor/introduce_factory.py | 30 +-- .../rope/refactor/introduce_parameter.py | 9 +- pymode/libs2/rope/refactor/localtofield.py | 11 +- pymode/libs2/rope/refactor/method_object.py | 21 +- pymode/libs2/rope/refactor/move.py | 213 +++++++++++++----- pymode/libs2/rope/refactor/multiproject.py | 8 +- pymode/libs2/rope/refactor/occurrences.py | 104 ++++++--- pymode/libs2/rope/refactor/patchedast.py | 37 ++- pymode/libs2/rope/refactor/rename.py | 40 ++-- 
pymode/libs2/rope/refactor/restructure.py | 24 +- pymode/libs2/rope/refactor/similarfinder.py | 34 +-- pymode/libs2/rope/refactor/sourceutils.py | 9 +- pymode/libs2/rope/refactor/suites.py | 1 + pymode/libs2/rope/refactor/topackage.py | 8 +- pymode/libs2/rope/refactor/usefunction.py | 15 +- pymode/libs2/rope/refactor/wildcards.py | 6 +- pymode/libs3/rope/__init__.py | 2 +- pymode/libs3/rope/refactor/patchedast.py | 26 +-- pymode/libs3/rope/refactor/suites.py | 9 - 62 files changed, 1331 insertions(+), 800 deletions(-) diff --git a/pymode/libs2/rope/__init__.py b/pymode/libs2/rope/__init__.py index 19466380..c8e11f68 100644 --- a/pymode/libs2/rope/__init__.py +++ b/pymode/libs2/rope/__init__.py @@ -1,7 +1,7 @@ """rope, a python refactoring library""" INFO = __doc__ -VERSION = '0.9.4' +VERSION = '0.10.2' COPYRIGHT = """\ Copyright (C) 2006-2012 Ali Gholami Rudi Copyright (C) 2009-2012 Anton Gritsay diff --git a/pymode/libs2/rope/base/arguments.py b/pymode/libs2/rope/base/arguments.py index 342e2ae5..7ba43640 100644 --- a/pymode/libs2/rope/base/arguments.py +++ b/pymode/libs2/rope/base/arguments.py @@ -72,6 +72,8 @@ def get_pynames(self, parameters): def get_instance_pyname(self): return self.pynames[0] + + class MixedArguments(object): def __init__(self, pyname, arguments, scope): diff --git a/pymode/libs2/rope/base/builtins.py b/pymode/libs2/rope/base/builtins.py index 78e7afb0..5bb84859 100644 --- a/pymode/libs2/rope/base/builtins.py +++ b/pymode/libs2/rope/base/builtins.py @@ -149,8 +149,10 @@ def _get_builtin(*args): return cls._generated[args] return _get_builtin + def _create_builtin_getter(cls): type_getter = _create_builtin_type_getter(cls) + def _get_builtin(*args): return pyobjects.PyObject(type_getter(*args)) return _get_builtin @@ -233,7 +235,7 @@ def __call__(self, name, returned=None, function=None, except AttributeError: if check_existence: raise - builtin=None + builtin = None self.attributes[name] = BuiltinName( BuiltinFunction(returned=returned, 
function=function, argnames=argnames, builtin=builtin)) @@ -252,7 +254,8 @@ def __init__(self, holding=None): collector('__new__', function=self._new_list) # Adding methods - collector('append', function=self._list_add, argnames=['self', 'value']) + collector('append', function=self._list_add, + argnames=['self', 'value']) collector('__setitem__', function=self._list_add, argnames=['self', 'index', 'value']) collector('insert', function=self._list_add, @@ -306,7 +309,6 @@ class Dict(BuiltinClass): def __init__(self, keys=None, values=None): self.keys = keys self.values = values - item = get_tuple(self.keys, self.values) collector = _AttributeCollector(dict) collector('__new__', function=self._new_dict) collector('__setitem__', function=self._dict_add) @@ -327,7 +329,8 @@ def do_create(holding=None): if holding is None: return get_dict() type = holding.get_type() - if isinstance(type, Tuple) and len(type.get_holding_objects()) == 2: + if isinstance(type, Tuple) and \ + len(type.get_holding_objects()) == 2: return get_dict(*type.get_holding_objects()) return _create_builtin(args, do_create) @@ -384,7 +387,7 @@ def _self_set(self, context): if new_dict and isinstance(new_dict.get_object().get_type(), Dict): args = arguments.ObjectArguments([new_dict]) items = new_dict.get_object()['popitem'].\ - get_object().get_returned_object(args) + get_object().get_returned_object(args) context.save_per_name(items) else: holding = _infer_sequence_for_pyname(new_dict) @@ -405,7 +408,8 @@ def __init__(self, *objects): first = objects[0] attributes = { '__getitem__': BuiltinName(BuiltinFunction(first)), - '__getslice__': BuiltinName(BuiltinFunction(pyobjects.PyObject(self))), + '__getslice__': + BuiltinName(BuiltinFunction(pyobjects.PyObject(self))), '__new__': BuiltinName(BuiltinFunction(function=self._new_tuple)), '__iter__': BuiltinName(BuiltinFunction(get_iterator(first)))} super(Tuple, self).__init__(tuple, attributes) @@ -485,8 +489,9 @@ def __init__(self): self_methods = 
['__getitem__', '__getslice__', 'capitalize', 'center', 'decode', 'encode', 'expandtabs', 'join', 'ljust', - 'lower', 'lstrip', 'replace', 'rjust', 'rstrip', 'strip', - 'swapcase', 'title', 'translate', 'upper', 'zfill'] + 'lower', 'lstrip', 'replace', 'rjust', 'rstrip', + 'strip', 'swapcase', 'title', 'translate', 'upper', + 'zfill'] for method in self_methods: collector(method, self_object) @@ -514,6 +519,7 @@ def get_object(self): def get_definition_location(self): return (None, None) + class Iterator(pyobjects.AbstractClass): def __init__(self, holding=None): @@ -539,7 +545,8 @@ def __init__(self, holding=None): self.holding = holding self.attributes = { 'next': BuiltinName(BuiltinFunction(self.holding)), - '__iter__': BuiltinName(BuiltinFunction(get_iterator(self.holding))), + '__iter__': BuiltinName(BuiltinFunction( + get_iterator(self.holding))), 'close': BuiltinName(BuiltinFunction()), 'send': BuiltinName(BuiltinFunction()), 'throw': BuiltinName(BuiltinFunction())} @@ -556,10 +563,10 @@ def get_returned_object(self, args): class File(BuiltinClass): def __init__(self): - self_object = pyobjects.PyObject(self) str_object = get_str() str_list = get_list(get_str()) attributes = {} + def add(name, returned=None, function=None): builtin = getattr(file, name, None) attributes[name] = BuiltinName( @@ -587,7 +594,8 @@ def __init__(self, fget=None, fset=None, fdel=None, fdoc=None): 'fget': BuiltinName(BuiltinFunction()), 'fset': BuiltinName(pynames.UnboundName()), 'fdel': BuiltinName(pynames.UnboundName()), - '__new__': BuiltinName(BuiltinFunction(function=_property_function))} + '__new__': BuiltinName( + BuiltinFunction(function=_property_function))} super(Property, self).__init__(property, attributes) def get_property_object(self, args): @@ -631,7 +639,7 @@ def get_attributes(self): return {} def get_name(self): - return 'lambda' + return 'lambda' def get_param_names(self, special_args=True): result = [node.id for node in self.arguments.args @@ -671,7 +679,7 @@ def 
_infer_sequence_for_pyname(pyname): iter = obj.get_returned_object(args) if iter is not None and 'next' in iter: holding = iter['next'].get_object().\ - get_returned_object(args) + get_returned_object(args) return holding @@ -690,12 +698,15 @@ def _create_builtin(args, creator): def _range_function(args): return get_list() + def _reversed_function(args): return _create_builtin(args, get_iterator) + def _sorted_function(args): return _create_builtin(args, get_list) + def _super_function(args): passed_class, passed_self = args.get_arguments(['type', 'self']) if passed_self is None: @@ -709,6 +720,7 @@ def _super_function(args): return pyobjects.PyObject(supers[0]) return passed_self + def _zip_function(args): args = args.get_pynames(['sequence']) objects = [] @@ -721,6 +733,7 @@ def _zip_function(args): tuple = get_tuple(*objects) return get_list(tuple) + def _enumerate_function(args): passed = args.get_pynames(['sequence'])[0] if passed is None: @@ -730,6 +743,7 @@ def _enumerate_function(args): tuple = get_tuple(None, holding) return get_iterator(tuple) + def _iter_function(args): passed = args.get_pynames(['sequence'])[0] if passed is None: @@ -738,6 +752,7 @@ def _iter_function(args): holding = _infer_sequence_for_pyname(passed) return get_iterator(holding) + def _input_function(args): return get_str() @@ -751,17 +766,25 @@ def _input_function(args): 'file': BuiltinName(get_file_type()), 'open': BuiltinName(get_file_type()), 'unicode': BuiltinName(get_str_type()), - 'range': BuiltinName(BuiltinFunction(function=_range_function, builtin=range)), - 'reversed': BuiltinName(BuiltinFunction(function=_reversed_function, builtin=reversed)), - 'sorted': BuiltinName(BuiltinFunction(function=_sorted_function, builtin=sorted)), - 'super': BuiltinName(BuiltinFunction(function=_super_function, builtin=super)), - 'property': BuiltinName(BuiltinFunction(function=_property_function, builtin=property)), + 'range': BuiltinName(BuiltinFunction(function=_range_function, + 
builtin=range)), + 'reversed': BuiltinName(BuiltinFunction(function=_reversed_function, + builtin=reversed)), + 'sorted': BuiltinName(BuiltinFunction(function=_sorted_function, + builtin=sorted)), + 'super': BuiltinName(BuiltinFunction(function=_super_function, + builtin=super)), + 'property': BuiltinName(BuiltinFunction(function=_property_function, + builtin=property)), 'zip': BuiltinName(BuiltinFunction(function=_zip_function, builtin=zip)), - 'enumerate': BuiltinName(BuiltinFunction(function=_enumerate_function, builtin=enumerate)), + 'enumerate': BuiltinName(BuiltinFunction(function=_enumerate_function, + builtin=enumerate)), 'object': BuiltinName(BuiltinObject()), 'type': BuiltinName(BuiltinType()), - 'iter': BuiltinName(BuiltinFunction(function=_iter_function, builtin=iter)), - 'raw_input': BuiltinName(BuiltinFunction(function=_input_function, builtin=raw_input)), - } + 'iter': BuiltinName(BuiltinFunction(function=_iter_function, + builtin=iter)), + 'raw_input': BuiltinName(BuiltinFunction(function=_input_function, + builtin=raw_input)), +} builtins = BuiltinModule('__builtin__', initial=_initial_builtins) diff --git a/pymode/libs2/rope/base/change.py b/pymode/libs2/rope/base/change.py index 8d19aac1..e9764484 100644 --- a/pymode/libs2/rope/base/change.py +++ b/pymode/libs2/rope/base/change.py @@ -2,7 +2,6 @@ import difflib import os import time -import warnings import rope.base.fscommands from rope.base import taskhandle, exceptions, utils @@ -17,13 +16,13 @@ class Change(object): def do(self, job_set=None): """Perform the change - + .. note:: Do use this directly. Use `Project.do()` instead. """ def undo(self, job_set=None): """Perform the change - + .. note:: Do use this directly. Use `History.undo()` instead. 
""" @@ -97,7 +96,8 @@ def __str__(self): date = datetime.datetime.fromtimestamp(self.time) if date.date() == datetime.date.today(): string_date = 'today' - elif date.date() == (datetime.date.today() - datetime.timedelta(1)): + elif date.date() == (datetime.date.today() - + datetime.timedelta(1)): string_date = 'yesterday' elif date.year == datetime.date.today().year: string_date = date.strftime('%b %d') @@ -257,7 +257,8 @@ class CreateFolder(CreateResource): """ def __init__(self, parent, name): - resource = parent.project.get_folder(self._get_child_path(parent, name)) + resource = parent.project.get_folder( + self._get_child_path(parent, name)) super(CreateFolder, self).__init__(resource) @@ -309,6 +310,7 @@ def count_changes(change): return result return 1 + def create_job_set(task_handle, change): return task_handle.create_jobset(str(change), count_changes(change)) diff --git a/pymode/libs2/rope/base/codeanalyze.py b/pymode/libs2/rope/base/codeanalyze.py index 3d2a2a45..87061912 100644 --- a/pymode/libs2/rope/base/codeanalyze.py +++ b/pymode/libs2/rope/base/codeanalyze.py @@ -18,6 +18,7 @@ def add_change(self, start, end, new_text=None): def get_changed(self): if not self.changes: return None + def compare_changes(change1, change2): return cmp(change1[:2], change2[:2]) self.changes.sort(compare_changes) @@ -131,6 +132,7 @@ def __call__(self): return result _main_chars = re.compile(r'[\'|"|#|\\|\[|\]|\{|\}|\(|\)]') + def _analyze_line(self, line): char = None for match in self._main_chars.finditer(line): @@ -142,8 +144,8 @@ def _analyze_line(self, line): if char * 3 == line[i:i + 3]: self.in_string = char * 3 elif self.in_string == line[i:i + len(self.in_string)] and \ - not (i > 0 and line[i - 1] == '\\' and - not (i > 1 and line[i - 2] == '\\')): + not (i > 0 and line[i - 1] == '\\' and + not (i > 1 and line[i - 2] == '\\')): self.in_string = '' if self.in_string: continue @@ -158,6 +160,7 @@ def _analyze_line(self, line): else: self.continuation = False + def 
custom_generator(lines): return _CustomGenerator(lines)() @@ -189,7 +192,6 @@ def generate_regions(self, start_line=1, end_line=None): # XXX: `block_start` should be at a better position! block_start = 1 readline = LinesToReadline(self.lines, block_start) - shifted = start_line - block_start + 1 try: for start, end in self._logical_lines(readline): real_start = start + block_start - 1 @@ -199,7 +201,7 @@ def generate_regions(self, start_line=1, end_line=None): real_end = end + block_start - 1 if real_start >= start_line: yield (real_start, real_end) - except tokenize.TokenError, e: + except tokenize.TokenError: pass def _block_logical_line(self, block_start, line_number): @@ -254,6 +256,7 @@ def __init__(self, lines, generate=custom_generator): self._generate = generate _starts = None + @property def starts(self): if self._starts is None: @@ -261,6 +264,7 @@ def starts(self): return self._starts _ends = None + @property def ends(self): if self._ends is None: @@ -326,6 +330,7 @@ def get_block_start(lines, lineno, maximum_indents=80): _block_start_pattern = None + def get_block_start_patterns(): global _block_start_pattern if not _block_start_pattern: @@ -350,9 +355,10 @@ def count_line_indents(line): def get_string_pattern(): start = r'(\b[uU]?[rR]?)?' 
longstr = r'%s"""(\\.|"(?!"")|\\\n|[^"\\])*"""' % start - shortstr = r'%s"(\\.|[^"\\\n])*"' % start + shortstr = r'%s"(\\.|\\\n|[^"\\])*"' % start return '|'.join([longstr, longstr.replace('"', "'"), shortstr, shortstr.replace('"', "'")]) + def get_comment_pattern(): return r'#[^\n]*' diff --git a/pymode/libs2/rope/base/default_config.py b/pymode/libs2/rope/base/default_config.py index ffebcd4f..0ee9937d 100644 --- a/pymode/libs2/rope/base/default_config.py +++ b/pymode/libs2/rope/base/default_config.py @@ -14,7 +14,7 @@ def set_prefs(prefs): # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject', - '.hg', '.svn', '_svn', '.git'] + '.hg', '.svn', '_svn', '.git', '.tox'] # Specifies which files should be considered python files. It is # useful when you have scripts inside your project. Only files @@ -79,6 +79,16 @@ def set_prefs(prefs): # appear in the importing namespace. prefs['ignore_bad_imports'] = False + # If `True`, rope will transform a comma list of imports into + # multiple separate import statements when organizing + # imports. + prefs['split_imports'] = False + + # If `True`, rope will sort imports alphabetically by module name + # instead of alphabetically by import statement, with from imports + # after normal imports. 
+ prefs['sort_imports_alphabetically'] = False + def project_opened(project): """This function is called after opening the project""" diff --git a/pymode/libs2/rope/base/evaluate.py b/pymode/libs2/rope/base/evaluate.py index 6736b2a9..faf09407 100644 --- a/pymode/libs2/rope/base/evaluate.py +++ b/pymode/libs2/rope/base/evaluate.py @@ -6,6 +6,7 @@ BadIdentifierError = exceptions.BadIdentifierError + def eval_location(pymodule, offset): """Find the pyname at the offset""" return eval_location2(pymodule, offset)[1] @@ -40,7 +41,8 @@ def eval_str2(holding_scope, name): # parenthesizing for handling cases like 'a_var.\nattr' node = ast.parse('(%s)' % name) except SyntaxError: - raise BadIdentifierError('Not a resolvable python identifier selected.') + raise BadIdentifierError( + 'Not a resolvable python identifier selected.') return eval_node2(holding_scope, node) @@ -81,7 +83,8 @@ def get_primary_and_pyname_at(self, offset): keyword_name = self.worder.get_word_at(offset) pyobject = self.get_enclosing_function(offset) if isinstance(pyobject, pyobjects.PyFunction): - return (None, pyobject.get_parameters().get(keyword_name, None)) + return (None, + pyobject.get_parameters().get(keyword_name, None)) # class body if self._is_defined_in_class_body(holding_scope, offset, lineno): class_scope = holding_scope @@ -93,7 +96,8 @@ def get_primary_and_pyname_at(self, offset): except rope.base.exceptions.AttributeNotFoundError: return (None, None) # function header - if self._is_function_name_in_function_header(holding_scope, offset, lineno): + if self._is_function_name_in_function_header(holding_scope, + offset, lineno): name = self.worder.get_primary_at(offset).strip() return (None, holding_scope.parent[name]) # from statement module @@ -118,7 +122,7 @@ def get_enclosing_function(self, offset): if isinstance(pyobject, pyobjects.AbstractFunction): return pyobject elif isinstance(pyobject, pyobjects.AbstractClass) and \ - '__init__' in pyobject: + '__init__' in pyobject: return 
pyobject['__init__'].get_object() elif '__call__' in pyobject: return pyobject['__call__'].get_object() @@ -157,6 +161,7 @@ def _Call(self, node): primary, pyobject = self._get_primary_and_object_for_node(node.func) if pyobject is None: return + def _get_returned(pyobject): args = arguments.create_arguments(primary, pyobject, node, self.scope) @@ -295,7 +300,8 @@ def _call_function(self, node, function_name, other_args=None): return if function_name in pyobject: called = pyobject[function_name].get_object() - if not called or not isinstance(called, pyobjects.AbstractFunction): + if not called or \ + not isinstance(called, pyobjects.AbstractFunction): return args = [node] if other_args: diff --git a/pymode/libs2/rope/base/fscommands.py b/pymode/libs2/rope/base/fscommands.py index 3bc22044..daf118a0 100644 --- a/pymode/libs2/rope/base/fscommands.py +++ b/pymode/libs2/rope/base/fscommands.py @@ -199,12 +199,14 @@ def unicode_to_file_data(contents, encoding=None): except UnicodeEncodeError: return contents.encode('utf-8') + def file_data_to_unicode(data, encoding=None): result = _decode_data(data, encoding) if '\r' in result: result = result.replace('\r\n', '\n').replace('\r', '\n') return result + def _decode_data(data, encoding): if isinstance(data, unicode): return data @@ -227,7 +229,6 @@ def read_file_coding(path): file = open(path, 'b') count = 0 result = [] - buffsize = 10 while True: current = file.read(10) if not current: diff --git a/pymode/libs2/rope/base/libutils.py b/pymode/libs2/rope/base/libutils.py index cb9381e3..4037f183 100644 --- a/pymode/libs2/rope/base/libutils.py +++ b/pymode/libs2/rope/base/libutils.py @@ -3,6 +3,8 @@ import rope.base.project import rope.base.pycore +from rope.base import pyobjectsdef +from rope.base import utils from rope.base import taskhandle @@ -17,7 +19,7 @@ def path_to_resource(project, path, type=None): `Project.get_file()`, and `Project.get_folder()` methods. 
""" - project_path = relative(project.address, path) + project_path = path_relative_to_project_root(project, path) if project_path is None: project_path = rope.base.project._realpath(path) project = rope.base.project.get_no_project() @@ -29,13 +31,19 @@ def path_to_resource(project, path, type=None): return project.get_folder(project_path) return None + +def path_relative_to_project_root(project, path): + return relative(project.address, path) + +@utils.deprecated() def relative(root, path): root = rope.base.project._realpath(root).replace(os.path.sep, '/') path = rope.base.project._realpath(path).replace(os.path.sep, '/') if path == root: - return '' + return '' if path.startswith(root + '/'): - return path[len(root) + 1:] + return path[len(root) + 1:] + def report_change(project, path, old_content): """Report that the contents of file at `path` was changed @@ -52,14 +60,63 @@ def report_change(project, path, old_content): rope.base.pycore.perform_soa_on_changed_scopes(project, resource, old_content) + +def analyze_module(project, resource): + """Perform static object analysis on a python file in the project + + Note that this might be really time consuming. + """ + project.pycore.analyze_module(resource) + + def analyze_modules(project, task_handle=taskhandle.NullTaskHandle()): """Perform static object analysis on all python files in the project Note that this might be really time consuming. """ - resources = project.pycore.get_python_files() + resources = project.get_python_files() job_set = task_handle.create_jobset('Analyzing Modules', len(resources)) for resource in resources: job_set.started_job(resource.path) - project.pycore.analyze_module(resource) + analyze_module(project, resource) job_set.finished_job() + + +def get_string_module(project, code, resource=None, force_errors=False): + """Returns a `PyObject` object for the given code + + If `force_errors` is `True`, `exceptions.ModuleSyntaxError` is + raised if module has syntax errors. 
This overrides + ``ignore_syntax_errors`` project config. + + """ + return pyobjectsdef.PyModule(project.pycore, code, resource, + force_errors=force_errors) + + +def get_string_scope(project, code, resource=None): + """Returns a `Scope` object for the given code""" + return get_string_module(project, code, resource).get_scope() + + +def is_python_file(project, resource): + return project.pycore.is_python_file(resource) + + +def modname(resource): + if resource.is_folder(): + module_name = resource.name + source_folder = resource.parent + elif resource.name == '__init__.py': + module_name = resource.parent.name + source_folder = resource.parent.parent + else: + module_name = resource.name[:-3] + source_folder = resource.parent + + while source_folder != source_folder.parent and \ + source_folder.has_child('__init__.py'): + module_name = source_folder.name + '.' + module_name + source_folder = source_folder.parent + + return module_name diff --git a/pymode/libs2/rope/base/oi/doa.py b/pymode/libs2/rope/base/oi/doa.py index 12f50553..1b2a00fc 100644 --- a/pymode/libs2/rope/base/oi/doa.py +++ b/pymode/libs2/rope/base/oi/doa.py @@ -25,11 +25,11 @@ def run(self): """Execute the process""" env = dict(os.environ) file_path = self.file.real_path - path_folders = self.pycore.get_source_folders() + \ - self.pycore.get_python_path_folders() + path_folders = self.pycore.project.get_source_folders() + \ + self.pycore.project.get_python_path_folders() env['PYTHONPATH'] = os.pathsep.join(folder.real_path for folder in path_folders) - runmod_path = self.pycore.find_module('rope.base.oi.runmod').real_path + runmod_path = self.pycore.project.find_module('rope.base.oi.runmod').real_path self.receiver = None self._init_data_receiving() send_info = '-' @@ -56,7 +56,8 @@ def _init_data_receiving(self): self.receiver = _SocketReceiver() else: self.receiver = _FIFOReceiver() - self.receiving_thread = threading.Thread(target=self._receive_information) + self.receiving_thread = 
threading.Thread( + target=self._receive_information) self.receiving_thread.setDaemon(True) self.receiving_thread.start() @@ -114,7 +115,7 @@ def __init__(self): try: self.server_socket.bind(('', self.data_port)) break - except socket.error, e: + except socket.error: self.data_port += 1 self.server_socket.listen(1) diff --git a/pymode/libs2/rope/base/oi/runmod.py b/pymode/libs2/rope/base/oi/runmod.py index 8170623c..e332d7e6 100644 --- a/pymode/libs2/rope/base/oi/runmod.py +++ b/pymode/libs2/rope/base/oi/runmod.py @@ -40,9 +40,9 @@ def send_data(self, data): def close(self): self.my_file.close() - def _cached(func): cache = {} + def newfunc(self, arg): if arg in cache: return cache[arg] @@ -76,7 +76,8 @@ def on_function_call(self, frame, event, arg): code = frame.f_code for argname in code.co_varnames[:code.co_argcount]: try: - args.append(self._object_to_persisted_form(frame.f_locals[argname])) + args.append(self._object_to_persisted_form( + frame.f_locals[argname])) except (TypeError, AttributeError): args.append(('unknown',)) try: @@ -94,17 +95,19 @@ def on_function_call(self, frame, event, arg): def _is_an_interesting_call(self, frame): #if frame.f_code.co_name in ['?', '']: # return False - #return not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code) + #return not frame.f_back or + # not self._is_code_inside_project(frame.f_back.f_code) if not self._is_code_inside_project(frame.f_code) and \ - (not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code)): + (not frame.f_back or + not self._is_code_inside_project(frame.f_back.f_code)): return False return True def _is_code_inside_project(self, code): source = self._path(code.co_filename) return source is not None and os.path.exists(source) and \ - _realpath(source).startswith(self.project_root) + _realpath(source).startswith(self.project_root) @_cached def _get_persisted_code(self, object_): @@ -128,7 +131,8 @@ def _get_persisted_builtin(self, object_): holding = None if 
len(object_) > 0: holding = object_[0] - return ('builtin', 'list', self._object_to_persisted_form(holding)) + return ('builtin', 'list', + self._object_to_persisted_form(holding)) if isinstance(object_, dict): keys = None values = None @@ -152,7 +156,8 @@ def _get_persisted_builtin(self, object_): for o in object_: holding = o break - return ('builtin', 'set', self._object_to_persisted_form(holding)) + return ('builtin', 'set', + self._object_to_persisted_form(holding)) return ('unknown',) def _object_to_persisted_form(self, object_): diff --git a/pymode/libs2/rope/base/oi/soa.py b/pymode/libs2/rope/base/oi/soa.py index 38cd5c9d..a34b970e 100644 --- a/pymode/libs2/rope/base/oi/soa.py +++ b/pymode/libs2/rope/base/oi/soa.py @@ -26,9 +26,11 @@ def _analyze_node(pycore, pydefined, should_analyze, new_followed_calls = max(0, followed_calls - 1) return_true = lambda pydefined: True return_false = lambda pydefined: False + def _follow(pyfunction): _analyze_node(pycore, pyfunction, return_true, return_false, new_followed_calls) + if not followed_calls: _follow = None visitor = SOAVisitor(pycore, pydefined, _follow) @@ -113,7 +115,8 @@ def _Assign(self, node): args_pynames.append(evaluate.eval_node(self.scope, subscript.slice.value)) value = rope.base.oi.soi._infer_assignment( - rope.base.pynames.AssignmentValue(node.value, levels), self.pymodule) + rope.base.pynames.AssignmentValue(node.value, levels), + self.pymodule) args_pynames.append(rope.base.pynames.UnboundName(value)) if instance is not None and value is not None: pyobject = instance.get_object() diff --git a/pymode/libs2/rope/base/oi/soi.py b/pymode/libs2/rope/base/oi/soi.py index bf40af90..5a11b5ef 100644 --- a/pymode/libs2/rope/base/oi/soi.py +++ b/pymode/libs2/rope/base/oi/soi.py @@ -30,6 +30,7 @@ def infer_returned_object(pyfunction, args): return result return object_info.get_returned(pyfunction, args) + @_ignore_inferred def infer_parameter_objects(pyfunction): """Infer the `PyObject`\s of parameters of 
this `PyFunction`""" @@ -40,6 +41,7 @@ def infer_parameter_objects(pyfunction): _handle_first_parameter(pyfunction, result) return result + def _handle_first_parameter(pyobject, parameters): kind = pyobject.get_kind() if parameters is None or kind not in ['method', 'classmethod']: @@ -53,6 +55,7 @@ def _handle_first_parameter(pyobject, parameters): if kind == 'classmethod': parameters[0] = pyobject.parent + @_ignore_inferred def infer_assigned_object(pyname): if not pyname.assignments: @@ -62,6 +65,7 @@ def infer_assigned_object(pyname): if result is not None: return result + def get_passed_objects(pyfunction, parameter_index): object_info = pyfunction.pycore.object_info result = object_info.get_passed_objects(pyfunction, @@ -72,6 +76,7 @@ def get_passed_objects(pyfunction, parameter_index): result.append(statically_inferred[parameter_index]) return result + def _infer_returned(pyobject, args): if args: # HACK: Setting parameter objects manually @@ -99,12 +104,14 @@ def _infer_returned(pyobject, args): except rope.base.pyobjects.IsBeingInferredError: pass + def _parameter_objects(pyobject): params = pyobject.get_param_names(special_args=False) return [rope.base.pyobjects.get_unknown()] * len(params) # handling `rope.base.pynames.AssignmentValue` + @_ignore_inferred def _infer_assignment(assignment, pymodule): result = _follow_pyname(assignment, pymodule) @@ -116,6 +123,7 @@ def _infer_assignment(assignment, pymodule): return None return _follow_levels(assignment, pyobject) + def _follow_levels(assignment, pyobject): for index in assignment.levels: if isinstance(pyobject.get_type(), rope.base.builtins.Tuple): @@ -132,6 +140,7 @@ def _follow_levels(assignment, pyobject): break return pyobject + @_ignore_inferred def _follow_pyname(assignment, pymodule, lineno=None): assign_node = assignment.ast_node @@ -149,6 +158,7 @@ def _follow_pyname(assignment, pymodule, lineno=None): arguments.ObjectArguments([arg])) return pyname, result + @_ignore_inferred def 
_follow_evaluations(assignment, pyname, pyobject): new_pyname = pyname @@ -181,6 +191,7 @@ def _get_lineno_for_node(assign_node): return assign_node.lineno return 1 + def _get_attribute(pyobject, name): if pyobject is not None and name in pyobject: return pyobject[name] diff --git a/pymode/libs2/rope/base/oi/transform.py b/pymode/libs2/rope/base/oi/transform.py index 5a9d600e..aa29c373 100644 --- a/pymode/libs2/rope/base/oi/transform.py +++ b/pymode/libs2/rope/base/oi/transform.py @@ -120,7 +120,6 @@ def transform(self, textual): return None def builtin_to_pyobject(self, textual): - name = textual[1] method = getattr(self, 'builtin_%s_to_pyobject' % textual[1], None) if method is not None: return method(textual) @@ -203,7 +202,7 @@ def instance_to_pyobject(self, textual): def _get_pymodule(self, path): resource = self.path_to_resource(path) if resource is not None: - return self.project.pycore.resource_to_pyobject(resource) + return self.project.get_pymodule(resource) def path_to_resource(self, path): try: @@ -221,7 +220,7 @@ def path_to_resource(self, path): class DOITextualToPyObject(TextualToPyObject): """For transforming textual form to `PyObject` - + The textual form DOI uses is different from rope's standard textual form. The reason is that we cannot find the needed information by analyzing live objects. 
This class can be @@ -253,7 +252,8 @@ def _class_to_pyobject(self, textual): isinstance(suspected, rope.base.pyobjects.PyClass): return suspected else: - lineno = self._find_occurrence(name, pymodule.get_resource().read()) + lineno = self._find_occurrence(name, + pymodule.get_resource().read()) if lineno is not None: inner_scope = module_scope.get_inner_scope_for_line(lineno) return inner_scope.pyobject @@ -278,8 +278,8 @@ def _find_occurrence(self, name, source): def path_to_resource(self, path): import rope.base.libutils - root = self.project.address - relpath = rope.base.libutils.relative(root, path) + relpath = rope.base.libutils.path_relative_to_project_root( + self.project, path) if relpath is not None: path = relpath return super(DOITextualToPyObject, self).path_to_resource(path) diff --git a/pymode/libs2/rope/base/prefs.py b/pymode/libs2/rope/base/prefs.py index 674a58ec..2ab45dac 100644 --- a/pymode/libs2/rope/base/prefs.py +++ b/pymode/libs2/rope/base/prefs.py @@ -27,7 +27,7 @@ def get(self, key, default=None): def add_callback(self, key, callback): """Add `key` preference with `callback` function - + Whenever `key` is set the callback is called with the given `value` as parameter. 
diff --git a/pymode/libs2/rope/base/project.py b/pymode/libs2/rope/base/project.py index 97d2dd3e..23597f8c 100644 --- a/pymode/libs2/rope/base/project.py +++ b/pymode/libs2/rope/base/project.py @@ -6,8 +6,9 @@ import rope.base.fscommands from rope.base import exceptions, taskhandle, prefs, history, pycore, utils -from rope.base.resourceobserver import * +import rope.base.resourceobserver as resourceobserver from rope.base.resources import File, Folder, _ResourceMatcher +from rope.base.exceptions import ModuleNotFoundError class _Project(object): @@ -17,6 +18,7 @@ def __init__(self, fscommands): self.fscommands = fscommands self.prefs = prefs.Prefs() self.data_files = _DataFiles(self) + self._custom_source_folders = [] def get_resource(self, resource_name): """Get a resource in a project. @@ -41,6 +43,40 @@ def get_resource(self, resource_name): raise exceptions.ResourceNotFoundError('Unknown resource ' + resource_name) + def get_module(self, name, folder=None): + """Returns a `PyObject` if the module was found.""" + # check if this is a builtin module + pymod = self.pycore.builtin_module(name) + if pymod is not None: + return pymod + module = self.find_module(name, folder) + if module is None: + raise ModuleNotFoundError('Module %s not found' % name) + return self.pycore.resource_to_pyobject(module) + + def get_python_path_folders(self): + result = [] + for src in self.prefs.get('python_path', []) + sys.path: + try: + src_folder = get_no_project().get_resource(src) + result.append(src_folder) + except exceptions.ResourceNotFoundError: + pass + return result + + # INFO: It was decided not to cache source folders, since: + # - Does not take much time when the root folder contains + # packages, that is most of the time + # - We need a separate resource observer; `self.observer` + # does not get notified about module and folder creations + def get_source_folders(self): + """Returns project source folders""" + if self.root is None: + return [] + result = 
list(self._custom_source_folders) + result.extend(self.pycore._find_source_folders(self.root)) + return result + def validate(self, folder): """Validate files and folders contained in this folder @@ -71,6 +107,9 @@ def do(self, changes, task_handle=taskhandle.NullTaskHandle()): """ self.history.do(changes, task_handle=task_handle) + def get_pymodule(self, resource, force_errors=False): + return self.pycore.resource_to_pyobject(resource, force_errors) + def get_pycore(self): return self.pycore @@ -82,12 +121,45 @@ def get_folder(self, path): """Get the folder with `path` (it may not exist)""" return Folder(self, path) - def is_ignored(self, resource): - return False - def get_prefs(self): return self.prefs + def get_relative_module(self, name, folder, level): + module = self.find_relative_module(name, folder, level) + if module is None: + raise ModuleNotFoundError('Module %s not found' % name) + return self.pycore.resource_to_pyobject(module) + + def find_module(self, modname, folder=None): + """Returns a resource corresponding to the given module + + returns None if it can not be found + """ + for src in self.get_source_folders(): + module = _find_module_in_folder(src, modname) + if module is not None: + return module + for src in self.get_python_path_folders(): + module = _find_module_in_folder(src, modname) + if module is not None: + return module + if folder is not None: + module = _find_module_in_folder(folder, modname) + if module is not None: + return module + return None + + def find_relative_module(self, modname, folder, level): + for i in range(level - 1): + folder = folder.parent + if modname == '': + return folder + else: + return _find_module_in_folder(folder, modname) + + def is_ignored(self, resource): + return False + def _get_resource_path(self, name): pass @@ -144,10 +216,22 @@ def __init__(self, projectroot, fscommands=None, if ropefolder is not None: self.prefs['ignored_resources'] = [ropefolder] self._init_prefs(prefs) + 
self._init_source_folders() + + @utils.deprecated('Delete once deprecated functions are gone') + def _init_source_folders(self): + for path in self.prefs.get('source_folders', []): + folder = self.get_resource(path) + self._custom_source_folders.append(folder) def get_files(self): return self.file_list.get_files() + def get_python_files(self): + """Returns all python files available in the project""" + return [resource for resource in self.get_files() + if self.pycore.is_python_file(resource)] + def _get_resource_path(self, name): return os.path.join(self._address, *name.split('/')) @@ -244,6 +328,9 @@ def get_resource(self, name): def get_files(self): return [] + def get_python_files(self): + return [] + _no_project = None @@ -258,7 +345,7 @@ class _FileListCacher(object): def __init__(self, project): self.project = project self.files = None - rawobserver = ResourceObserver( + rawobserver = resourceobserver.ResourceObserver( self._changed, self._invalid, self._invalid, self._invalid, self._invalid) self.project.add_observer(rawobserver) @@ -334,7 +421,7 @@ def write(self): def _can_compress(self): try: - import gzip + import gzip # noqa return True except ImportError: return False @@ -371,5 +458,24 @@ def _realpath(path): if sys.platform == 'cygwin': if path[1:3] == ':\\': return path + elif path[1:3] == ':/': + path = "/cygdrive/" + path[0] + path[2:] return os.path.abspath(os.path.expanduser(path)) return os.path.realpath(os.path.abspath(os.path.expanduser(path))) + + +def _find_module_in_folder(folder, modname): + module = folder + packages = modname.split('.') + for pkg in packages[:-1]: + if module.is_folder() and module.has_child(pkg): + module = module.get_child(pkg) + else: + return None + if module.is_folder(): + if module.has_child(packages[-1]) and \ + module.get_child(packages[-1]).is_folder(): + return module.get_child(packages[-1]) + elif module.has_child(packages[-1] + '.py') and \ + not module.get_child(packages[-1] + '.py').is_folder(): + return 
module.get_child(packages[-1] + '.py') diff --git a/pymode/libs2/rope/base/pycore.py b/pymode/libs2/rope/base/pycore.py index 32056a0f..c4c1195a 100644 --- a/pymode/libs2/rope/base/pycore.py +++ b/pymode/libs2/rope/base/pycore.py @@ -3,15 +3,19 @@ import sys import warnings +import rope.base.libutils +import rope.base.resourceobserver +import rope.base.resources import rope.base.oi.doa import rope.base.oi.objectinfo import rope.base.oi.soa -from rope.base import ast, exceptions, taskhandle, utils, stdmods -from rope.base.exceptions import ModuleNotFoundError -from rope.base.pyobjectsdef import PyModule, PyPackage, PyClass -import rope.base.resources -import rope.base.resourceobserver from rope.base import builtins +from rope.base import exceptions +from rope.base import stdmods +from rope.base import taskhandle +from rope.base import utils +from rope.base.exceptions import ModuleNotFoundError +from rope.base.pyobjectsdef import PyModule, PyPackage class PyCore(object): @@ -25,7 +29,6 @@ def __init__(self, project): self.object_info = rope.base.oi.objectinfo.ObjectInfoManager(project) self._init_python_files() self._init_automatic_soa() - self._init_source_folders() def _init_python_files(self): self.python_matcher = None @@ -38,15 +41,10 @@ def _init_resource_observer(self): callback = self._invalidate_resource_cache observer = rope.base.resourceobserver.ResourceObserver( changed=callback, moved=callback, removed=callback) - self.observer = rope.base.resourceobserver.FilteredResourceObserver(observer) + self.observer = \ + rope.base.resourceobserver.FilteredResourceObserver(observer) self.project.add_observer(self.observer) - def _init_source_folders(self): - self._custom_source_folders = [] - for path in self.project.prefs.get('source_folders', []): - folder = self.project.get_resource(path) - self._custom_source_folders.append(folder) - def _init_automatic_soa(self): if not self.automatic_soa: return @@ -62,7 +60,7 @@ def automatic_soa(self): def 
_file_changed_for_soa(self, resource, new_resource=None): old_contents = self.project.history.\ - contents_before_current_change(resource) + contents_before_current_change(resource) if old_contents is not None: perform_soa_on_changed_scopes(self.project, resource, old_contents) @@ -73,16 +71,10 @@ def is_python_file(self, resource): return resource.name.endswith('.py') return self.python_matcher.does_match(resource) + @utils.deprecated('Use `project.get_module` instead') def get_module(self, name, folder=None): """Returns a `PyObject` if the module was found.""" - # check if this is a builtin module - pymod = self._builtin_module(name) - if pymod is not None: - return pymod - module = self.find_module(name, folder) - if module is None: - raise ModuleNotFoundError('Module %s not found' % name) - return self.resource_to_pyobject(module) + return self.project.get_module(name, folder) def _builtin_submodules(self, modname): result = {} @@ -90,18 +82,17 @@ def _builtin_submodules(self, modname): if extension.startswith(modname + '.'): name = extension[len(modname) + 1:] if '.' 
not in name: - result[name] = self._builtin_module(extension) + result[name] = self.builtin_module(extension) return result - def _builtin_module(self, name): + def builtin_module(self, name): return self.extension_cache.get_pymodule(name) + @utils.deprecated('Use `project.get_relative_module` instead') def get_relative_module(self, name, folder, level): - module = self.find_relative_module(name, folder, level) - if module is None: - raise ModuleNotFoundError('Module %s not found' % name) - return self.resource_to_pyobject(module) + return self.project.get_relative_module(name, folder, level) + @utils.deprecated('Use `libutils.get_string_module` instead') def get_string_module(self, code, resource=None, force_errors=False): """Returns a `PyObject` object for the given code @@ -112,92 +103,48 @@ def get_string_module(self, code, resource=None, force_errors=False): """ return PyModule(self, code, resource, force_errors=force_errors) + @utils.deprecated('Use `libutils.get_string_scope` instead') def get_string_scope(self, code, resource=None): """Returns a `Scope` object for the given code""" - return self.get_string_module(code, resource).get_scope() + return rope.base.libutils.get_string_scope(code, resource) def _invalidate_resource_cache(self, resource, new_resource=None): for observer in self.cache_observers: observer(resource) - def _find_module_in_folder(self, folder, modname): - module = folder - packages = modname.split('.') - for pkg in packages[:-1]: - if module.is_folder() and module.has_child(pkg): - module = module.get_child(pkg) - else: - return None - if module.is_folder(): - if module.has_child(packages[-1]) and \ - module.get_child(packages[-1]).is_folder(): - return module.get_child(packages[-1]) - elif module.has_child(packages[-1] + '.py') and \ - not module.get_child(packages[-1] + '.py').is_folder(): - return module.get_child(packages[-1] + '.py') - + @utils.deprecated('Use `project.get_python_path_folders` instead') def 
get_python_path_folders(self): - import rope.base.project - result = [] - for src in self.project.prefs.get('python_path', []) + sys.path: - try: - src_folder = rope.base.project.get_no_project().get_resource(src) - result.append(src_folder) - except rope.base.exceptions.ResourceNotFoundError: - pass - return result + return self.project.get_python_path_folders() + @utils.deprecated('Use `project.find_module` instead') def find_module(self, modname, folder=None): """Returns a resource corresponding to the given module returns None if it can not be found """ - return self._find_module(modname, folder) + return self.project.find_module(modname, folder) + @utils.deprecated('Use `project.find_relative_module` instead') def find_relative_module(self, modname, folder, level): - for i in range(level - 1): - folder = folder.parent - if modname == '': - return folder - else: - return self._find_module_in_folder(folder, modname) - - def _find_module(self, modname, folder=None): - """Return `modname` module resource""" - for src in self.get_source_folders(): - module = self._find_module_in_folder(src, modname) - if module is not None: - return module - for src in self.get_python_path_folders(): - module = self._find_module_in_folder(src, modname) - if module is not None: - return module - if folder is not None: - module = self._find_module_in_folder(folder, modname) - if module is not None: - return module - return None + return self.project.find_relative_module(modname, folder, level) # INFO: It was decided not to cache source folders, since: # - Does not take much time when the root folder contains # packages, that is most of the time # - We need a separate resource observer; `self.observer` # does not get notified about module and folder creations + @utils.deprecated('Use `project.get_source_folders` instead') def get_source_folders(self): """Returns project source folders""" - if self.project.root is None: - return [] - result = list(self._custom_source_folders) - 
result.extend(self._find_source_folders(self.project.root)) - return result + return self.project.get_source_folders() def resource_to_pyobject(self, resource, force_errors=False): return self.module_cache.get_pymodule(resource, force_errors) + @utils.deprecated('Use `project.get_python_files` instead') def get_python_files(self): """Returns all python files available in the project""" - return [resource for resource in self.project.get_files() - if self.is_python_file(resource)] + return self.project.get_python_files() def _is_package(self, folder): if folder.has_child('__init__.py') and \ @@ -270,22 +217,9 @@ def get_classes(self, task_handle=taskhandle.NullTaskHandle()): def __str__(self): return str(self.module_cache) + str(self.object_info) + @utils.deprecated('Use `libutils.modname` instead') def modname(self, resource): - if resource.is_folder(): - module_name = resource.name - source_folder = resource.parent - elif resource.name == '__init__.py': - module_name = resource.parent.name - source_folder = resource.parent.parent - else: - module_name = resource.name[:-3] - source_folder = resource.parent - - while source_folder != source_folder.parent and \ - source_folder.has_child('__init__.py'): - module_name = source_folder.name + '.' 
+ module_name - source_folder = source_folder.parent - return module_name + return rope.base.libutils.modname(resource) @property @utils.cacheit @@ -355,9 +289,11 @@ def perform_soa_on_changed_scopes(project, resource, old_contents): new_contents = resource.read() # detecting changes in new_contents relative to old_contents detector = _TextChangeDetector(new_contents, old_contents) + def search_subscopes(pydefined): scope = pydefined.get_scope() return detector.is_changed(scope.get_start(), scope.get_end()) + def should_analyze(pydefined): scope = pydefined.get_scope() start = scope.get_start() diff --git a/pymode/libs2/rope/base/pynames.py b/pymode/libs2/rope/base/pynames.py index 79bba156..5d489814 100644 --- a/pymode/libs2/rope/base/pynames.py +++ b/pymode/libs2/rope/base/pynames.py @@ -57,7 +57,7 @@ def __init__(self, ast_node, levels=None, evaluation='', """ self.ast_node = ast_node - if levels == None: + if levels is None: self.levels = [] else: self.levels = levels @@ -112,15 +112,16 @@ def _get_pymodule(self): if self.pymodule.get() is None: pycore = self.importing_module.pycore if self.resource is not None: - self.pymodule.set(pycore.resource_to_pyobject(self.resource)) + self.pymodule.set(pycore.project.get_pymodule(self.resource)) elif self.module_name is not None: try: if self.level == 0: - pymodule = pycore.get_module(self.module_name, - self._current_folder()) + pymodule = pycore.project.get_module( + self.module_name, self._current_folder()) else: - pymodule = pycore.get_relative_module( - self.module_name, self._current_folder(), self.level) + pymodule = pycore.project.get_relative_module( + self.module_name, self._current_folder(), + self.level) self.pymodule.set(pymodule) except exceptions.ModuleNotFoundError: pass @@ -172,6 +173,7 @@ def _circular_inference(): raise rope.base.pyobjects.IsBeingInferredError( 'Circular Object Inference') + class _Inferred(object): def __init__(self, get_inferred, concluded=None): diff --git 
a/pymode/libs2/rope/base/pyobjectsdef.py b/pymode/libs2/rope/base/pyobjectsdef.py index 50b24360..a738b4de 100644 --- a/pymode/libs2/rope/base/pyobjectsdef.py +++ b/pymode/libs2/rope/base/pyobjectsdef.py @@ -3,16 +3,17 @@ import rope.base.builtins import rope.base.oi.soi import rope.base.pyscopes +import rope.base.libutils from rope.base import (pynamesdef as pynames, exceptions, ast, astutils, pyobjects, fscommands, arguments, utils) -from rope.base.pyobjects import * class PyFunction(pyobjects.PyFunction): def __init__(self, pycore, ast_node, parent): - AbstractFunction.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, parent) + rope.base.pyobjects.AbstractFunction.__init__(self) + rope.base.pyobjects.PyDefinedObject.__init__( + self, pycore, ast_node, parent) self.arguments = self.ast_node.args self.parameter_pyobjects = pynames._Inferred( self._infer_parameters, self.get_module()._get_concluded_data()) @@ -109,8 +110,9 @@ class PyClass(pyobjects.PyClass): def __init__(self, pycore, ast_node, parent): self.visitor_class = _ClassVisitor - AbstractClass.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, parent) + rope.base.pyobjects.AbstractClass.__init__(self) + rope.base.pyobjects.PyDefinedObject.__init__( + self, pycore, ast_node, parent) self.parent = parent self._superclasses = self.get_module()._get_concluded_data() @@ -134,8 +136,9 @@ def _get_bases(self): base = rope.base.evaluate.eval_node(self.parent.get_scope(), base_name) if base is not None and \ - base.get_object().get_type() == get_base_type('Type'): - result.append(base.get_object()) + base.get_object().get_type() == \ + rope.base.pyobjects.get_base_type('Type'): + result.append(base.get_object()) return result def _create_scope(self): @@ -213,7 +216,7 @@ def __init__(self, pycore, resource=None, force_errors=False): self.resource = resource init_dot_py = self._get_init_dot_py() if init_dot_py is not None: - ast_node = pycore.resource_to_pyobject( + ast_node = 
pycore.project.get_pymodule( init_dot_py, force_errors=force_errors).get_ast() else: ast_node = ast.parse('\n') @@ -221,7 +224,7 @@ def __init__(self, pycore, resource=None, force_errors=False): def _create_structural_attributes(self): result = {} - modname = self.pycore.modname(self.resource) + modname = rope.base.libutils.modname(self.resource) extension_submodules = self.pycore._builtin_submodules(modname) for name, module in extension_submodules.iteritems(): result[name] = rope.base.builtins.BuiltinName(module) @@ -235,7 +238,7 @@ def _create_concluded_attributes(self): result = {} init_dot_py = self._get_init_dot_py() if init_dot_py: - init_object = self.pycore.resource_to_pyobject(init_dot_py) + init_object = self.pycore.project.get_pymodule(init_dot_py) result.update(init_object.get_attributes()) return result @@ -245,13 +248,14 @@ def _get_child_resources(self): if child.is_folder(): result[child.name] = child elif child.name.endswith('.py') and \ - child.name != '__init__.py': + child.name != '__init__.py': name = child.name[:-3] result[name] = child return result def _get_init_dot_py(self): - if self.resource is not None and self.resource.has_child('__init__.py'): + if self.resource is not None and \ + self.resource.has_child('__init__.py'): return self.resource.get_child('__init__.py') else: return None @@ -262,7 +266,7 @@ def _create_scope(self): def get_module(self): init_dot_py = self._get_init_dot_py() if init_dot_py: - return self.pycore.resource_to_pyobject(init_dot_py) + return self.pycore.project.get_pymodule(init_dot_py) return self @@ -329,7 +333,9 @@ def _FunctionDef(self, node): if isinstance(decorator, ast.Name) and decorator.id == 'property': if isinstance(self, _ClassVisitor): type_ = rope.base.builtins.Property(pyfunction) - arg = pynames.UnboundName(PyObject(self.owner_object)) + arg = pynames.UnboundName( + rope.base.pyobjects.PyObject(self.owner_object)) + def _eval(type_=type_, arg=arg): return type_.get_property_object( 
arguments.ObjectArguments([arg])) @@ -347,7 +353,7 @@ def _AugAssign(self, node): pass def _For(self, node): - names = self._update_evaluated(node.target, node.iter, + names = self._update_evaluated(node.target, node.iter, # noqa '.__iter__().next()') for child in node.body + node.orelse: ast.walk(child, self) @@ -362,7 +368,7 @@ def _assigned(self, name, assignment): self.names[name] = pyname def _update_evaluated(self, targets, assigned, - evaluation= '', eval_type=False): + evaluation='', eval_type=False): result = {} names = astutils.get_name_levels(targets) for name, levels in names: @@ -430,7 +436,8 @@ def _ImportFrom(self, node): def _is_ignored_import(self, imported_module): if not self.pycore.project.prefs.get('ignore_bad_imports', False): return False - return not isinstance(imported_module.get_object(), AbstractModule) + return not isinstance(imported_module.get_object(), + rope.base.pyobjects.AbstractModule) def _Global(self, node): module = self.get_module() diff --git a/pymode/libs2/rope/base/pyscopes.py b/pymode/libs2/rope/base/pyscopes.py index a00381b7..0bed19a9 100644 --- a/pymode/libs2/rope/base/pyscopes.py +++ b/pymode/libs2/rope/base/pyscopes.py @@ -230,8 +230,8 @@ def get_holding_scope(self, module_scope, lineno, line_indents=None): current_scope = module_scope new_scope = current_scope while new_scope is not None and \ - (new_scope.get_kind() == 'Module' or - self._get_scope_indents(new_scope) <= line_indents): + (new_scope.get_kind() == 'Module' or + self._get_scope_indents(new_scope) <= line_indents): current_scope = new_scope if current_scope.get_start() == lineno and \ current_scope.get_kind() != 'Module': @@ -268,7 +268,7 @@ def find_scope_end(self, scope): else: body_indents = self._get_body_indents(scope) for l in self.logical_lines.generate_starts( - min(end + 1, self.lines.length()), self.lines.length() + 1): + min(end + 1, self.lines.length()), self.lines.length() + 1): if not self._is_empty_line(l): if self.get_indents(l) < 
body_indents: return end @@ -288,6 +288,7 @@ def code(self): def logical_lines(self): return self.pymodule.logical_lines + class TemporaryScope(Scope): """Currently used for list comprehensions and generator expressions diff --git a/pymode/libs2/rope/base/resourceobserver.py b/pymode/libs2/rope/base/resourceobserver.py index 6d1accbc..7c0937d5 100644 --- a/pymode/libs2/rope/base/resourceobserver.py +++ b/pymode/libs2/rope/base/resourceobserver.py @@ -231,7 +231,8 @@ def _search_resource_changes(self, resource): def _is_changed(self, resource): if self.resources[resource] is None: return False - return self.resources[resource] != self.timekeeper.get_indicator(resource) + return self.resources[resource] != \ + self.timekeeper.get_indicator(resource) def _calculate_new_resource(self, main, new_main, resource): if new_main is None: diff --git a/pymode/libs2/rope/base/resources.py b/pymode/libs2/rope/base/resources.py index 46beadb0..aac755f0 100644 --- a/pymode/libs2/rope/base/resources.py +++ b/pymode/libs2/rope/base/resources.py @@ -1,9 +1,37 @@ +"""Files and folders in a project are represented as resource objects. + +Files and folders are access through `Resource` objects. `Resource` has +two subclasses: `File` and `Folder`. What we care about is that +refactorings and `rope.base.change.Change`s use resources. + +There are two options to create a `Resource` for a path in a project. +Note that in these examples `path` is the path to a file or folder +relative to the project's root. A project's root folder is represented +by an empty string. + + 1) Use the `rope.base.Project.get_resource()` method. E.g.: + + myresource = myproject.get_resource(path) + + + 2) Use the `rope.base.libutils` module. `libutils` has a function + named `path_to_resource()`. 
It takes a project and a path: + + from rope.base import libutils + + myresource = libutils.path_to_resource(myproject, path) + +Once we have a `Resource`, we can retrieve information from it, like +getting the path relative to the project's root (via `path`), reading +from and writing to the resource, moving the resource, etc. +""" + import os import re -import rope.base.change -import rope.base.fscommands +from rope.base import change from rope.base import exceptions +from rope.base import fscommands class Resource(object): @@ -15,12 +43,12 @@ def __init__(self, project, path): def move(self, new_location): """Move resource to `new_location`""" - self._perform_change(rope.base.change.MoveResource(self, new_location), + self._perform_change(change.MoveResource(self, new_location), 'Moving <%s> to <%s>' % (self.path, new_location)) def remove(self): """Remove resource from the project""" - self._perform_change(rope.base.change.RemoveResource(self), + self._perform_change(change.RemoveResource(self), 'Removing <%s>' % self.path) def is_folder(self): @@ -66,7 +94,7 @@ def __hash__(self): return hash(self.path) def _perform_change(self, change_, description): - changes = rope.base.change.ChangeSet(description) + changes = change.ChangeSet(description) changes.add_change(change_) self.project.do(changes) @@ -80,7 +108,7 @@ def __init__(self, project, name): def read(self): data = self.read_bytes() try: - return rope.base.fscommands.file_data_to_unicode(data) + return fscommands.file_data_to_unicode(data) except UnicodeDecodeError, e: raise exceptions.ModuleDecodeError(self.path, e.reason) @@ -93,7 +121,7 @@ def write(self, contents): return except IOError: pass - self._perform_change(rope.base.change.ChangeContents(self, contents), + self._perform_change(change.ChangeContents(self, contents), 'Writing file <%s>' % self.path) def is_folder(self): @@ -114,8 +142,12 @@ def is_folder(self): def get_children(self): """Return the children of this folder""" + try: + children 
= os.listdir(self.real_path) + except OSError: + return [] result = [] - for name in os.listdir(self.real_path): + for name in children: try: child = self.get_child(name) except exceptions.ResourceNotFoundError: @@ -126,13 +158,13 @@ def get_children(self): def create_file(self, file_name): self._perform_change( - rope.base.change.CreateFile(self, file_name), + change.CreateFile(self, file_name), 'Creating file <%s>' % self._get_child_path(file_name)) return self.get_child(file_name) def create_folder(self, folder_name): self._perform_change( - rope.base.change.CreateFolder(self, folder_name), + change.CreateFolder(self, folder_name), 'Creating folder <%s>' % self._get_child_path(folder_name)) return self.get_child(folder_name) @@ -187,8 +219,8 @@ def set_patterns(self, patterns): def _add_pattern(self, pattern): re_pattern = pattern.replace('.', '\\.').\ - replace('*', '[^/]*').replace('?', '[^/]').\ - replace('//', '/(.*/)?') + replace('*', '[^/]*').replace('?', '[^/]').\ + replace('//', '/(.*/)?') re_pattern = '^(.*/)?' 
+ re_pattern + '(/.*)?$' self.compiled_patterns.append(re.compile(re_pattern)) diff --git a/pymode/libs2/rope/base/stdmods.py b/pymode/libs2/rope/base/stdmods.py index b6c9839b..457a4fac 100644 --- a/pymode/libs2/rope/base/stdmods.py +++ b/pymode/libs2/rope/base/stdmods.py @@ -6,12 +6,15 @@ def _stdlib_path(): import distutils.sysconfig - return distutils.sysconfig.get_python_lib(standard_lib=True) + return distutils.sysconfig.get_python_lib(standard_lib=True, + plat_specific=True) + @utils.cached(1) def standard_modules(): return python_modules() | dynload_modules() + @utils.cached(1) def python_modules(): result = set() @@ -27,6 +30,7 @@ def python_modules(): result.add(name[:-3]) return result + @utils.cached(1) def dynload_modules(): result = set(sys.builtin_module_names) @@ -35,6 +39,8 @@ def dynload_modules(): for name in os.listdir(dynload_path): path = os.path.join(dynload_path, name) if os.path.isfile(path): - if name.endswith('.so') or name.endswith('.dll'): + if name.endswith('.dll'): result.add(os.path.splitext(name)[0]) + if name.endswith('.so'): + result.add(os.path.splitext(name)[0].replace('module', '')) return result diff --git a/pymode/libs2/rope/base/taskhandle.py b/pymode/libs2/rope/base/taskhandle.py index 6d4ed856..c1f01b98 100644 --- a/pymode/libs2/rope/base/taskhandle.py +++ b/pymode/libs2/rope/base/taskhandle.py @@ -1,5 +1,3 @@ -import warnings - from rope.base import exceptions diff --git a/pymode/libs2/rope/base/utils.py b/pymode/libs2/rope/base/utils.py index e35ecbf3..11556c13 100644 --- a/pymode/libs2/rope/base/utils.py +++ b/pymode/libs2/rope/base/utils.py @@ -5,6 +5,7 @@ def saveit(func): """A decorator that caches the return value of a function""" name = '_' + func.__name__ + def _wrapper(self, *args, **kwds): if not hasattr(self, name): setattr(self, name, func(self, *args, **kwds)) @@ -13,10 +14,12 @@ def _wrapper(self, *args, **kwds): cacheit = saveit + def prevent_recursion(default): """A decorator that returns the return value 
of `default` in recursions""" def decorator(func): name = '_calling_%s_' % func.__name__ + def newfunc(self, *args, **kwds): if getattr(self, name, False): return default() @@ -46,6 +49,7 @@ def deprecated(message=None): def _decorator(func, message=message): if message is None: message = '%s is deprecated' % func.__name__ + def newfunc(*args, **kwds): warnings.warn(message, DeprecationWarning, stacklevel=2) return func(*args, **kwds) @@ -59,6 +63,7 @@ def decorator(func): return _Cached(func, count) return decorator + class _Cached(object): def __init__(self, func, count): diff --git a/pymode/libs2/rope/base/worder.py b/pymode/libs2/rope/base/worder.py index 08d75f34..c85c6b36 100644 --- a/pymode/libs2/rope/base/worder.py +++ b/pymode/libs2/rope/base/worder.py @@ -257,8 +257,10 @@ def get_splitted_primary_before(self, offset): return (self.raw[real_start:end], '', offset) last_dot_position = word_start if self.code[word_start] != '.': - last_dot_position = self._find_last_non_space_char(word_start - 1) - last_char_position = self._find_last_non_space_char(last_dot_position - 1) + last_dot_position = \ + self._find_last_non_space_char(word_start - 1) + last_char_position = \ + self._find_last_non_space_char(last_dot_position - 1) if self.code[word_start].isspace(): word_start = offset return (self.raw[real_start:last_char_position + 1], @@ -304,8 +306,8 @@ def is_a_function_being_called(self, offset): word_end = self._find_word_end(offset) + 1 next_char = self._find_first_non_space_char(word_end) return next_char < len(self.code) and \ - self.code[next_char] == '(' and \ - not self.is_a_class_or_function_name_in_header(offset) + self.code[next_char] == '(' and \ + not self.is_a_class_or_function_name_in_header(offset) def _find_import_end(self, start): return self._get_line_end(start) @@ -337,7 +339,7 @@ def is_from_statement_module(self, offset): def is_a_name_after_from_import(self, offset): try: - if len(self.code) > offset and self.code[offset] == '\n': + if 
len(self.code) > offset and self.code[offset] == '\n': line_start = self._get_line_start(offset - 1) else: line_start = self._get_line_start(offset) @@ -405,7 +407,6 @@ def is_on_function_call_keyword(self, offset): def find_parens_start_from_inside(self, offset): stop = self._get_line_start(offset) - opens = 1 while offset > stop: if self.code[offset] == '(': break @@ -501,7 +502,7 @@ def is_assigned_in_a_tuple_assignment(self, offset): parens_start = self.find_parens_start_from_inside(offset) # XXX: only handling (x, y) = value return offset < equals_offset and \ - self.code[start:parens_start].strip() == '' + self.code[start:parens_start].strip() == '' def get_function_and_args_in_header(self, offset): offset = self.find_function_offset(offset) @@ -518,7 +519,7 @@ def find_function_offset(self, offset, definition='def '): return self._find_first_non_space_char(def_) def get_lambda_and_args(self, offset): - offset = self.find_function_offset(offset, definition = 'lambda ') - lparens, rparens = self.get_word_parens_range(offset, opening=' ', closing=':') + offset = self.find_function_offset(offset, definition='lambda ') + lparens, rparens = self.get_word_parens_range(offset, opening=' ', + closing=':') return self.raw[offset:rparens + 1] - diff --git a/pymode/libs2/rope/contrib/autoimport.py b/pymode/libs2/rope/contrib/autoimport.py index 4b7b5b05..9670080c 100644 --- a/pymode/libs2/rope/contrib/autoimport.py +++ b/pymode/libs2/rope/contrib/autoimport.py @@ -1,7 +1,13 @@ import re -from rope.base import (exceptions, pynames, resourceobserver, - taskhandle, pyobjects, builtins, resources) +from rope.base import builtins +from rope.base import exceptions +from rope.base import libutils +from rope.base import pynames +from rope.base import pyobjects +from rope.base import resources +from rope.base import resourceobserver +from rope.base import taskhandle from rope.refactor import importutils @@ -65,11 +71,10 @@ def get_all_names(self): def get_name_locations(self, 
name): """Return a list of ``(resource, lineno)`` tuples""" result = [] - pycore = self.project.pycore for module in self.names: if name in self.names[module]: try: - pymodule = pycore.get_module(module) + pymodule = self.project.get_module(module) if name in pymodule: pyname = pymodule[name] module, lineno = pyname.get_definition_location() @@ -91,7 +96,7 @@ def generate_cache(self, resources=None, underlined=None, """ if resources is None: - resources = self.project.pycore.get_python_files() + resources = self.project.get_python_files() job_set = task_handle.create_jobset( 'Generatig autoimport cache', len(resources)) for file in resources: @@ -107,7 +112,7 @@ def generate_modules_cache(self, modules, underlined=None, for modname in modules: job_set.started_job('Working on <%s>' % modname) if modname.endswith('.*'): - mod = self.project.pycore.find_module(modname[:-2]) + mod = self.project.find_module(modname[:-2]) if mod: for sub in submodules(mod): self.update_resource(sub, underlined) @@ -130,13 +135,13 @@ def find_insertion_line(self, code): if match is not None: code = code[:match.start()] try: - pymodule = self.project.pycore.get_string_module(code) + pymodule = libutils.get_string_module(self.project, code) except exceptions.ModuleSyntaxError: return 1 testmodname = '__rope_testmodule_rope' importinfo = importutils.NormalImport(((testmodname, None),)) - module_imports = importutils.get_module_imports( - self.project.pycore, pymodule) + module_imports = importutils.get_module_imports(self.project, + pymodule) module_imports.add_import(importinfo) code = module_imports.get_changed_source() offset = code.index(testmodname) @@ -146,7 +151,7 @@ def find_insertion_line(self, code): def update_resource(self, resource, underlined=None): """Update the cache for global names in `resource`""" try: - pymodule = self.project.pycore.resource_to_pyobject(resource) + pymodule = self.project.get_pymodule(resource) modname = self._module_name(resource) 
self._add_names(pymodule, modname, underlined) except exceptions.ModuleSyntaxError: @@ -158,13 +163,13 @@ def update_module(self, modname, underlined=None): `modname` is the name of a module. """ try: - pymodule = self.project.pycore.get_module(modname) + pymodule = self.project.get_module(modname) self._add_names(pymodule, modname, underlined) except exceptions.ModuleNotFoundError: pass def _module_name(self, resource): - return self.project.pycore.modname(resource) + return libutils.modname(resource) def _add_names(self, pymodule, modname, underlined): if underlined is None: diff --git a/pymode/libs2/rope/contrib/codeassist.py b/pymode/libs2/rope/contrib/codeassist.py index 37433c2a..48b4a813 100644 --- a/pymode/libs2/rope/contrib/codeassist.py +++ b/pymode/libs2/rope/contrib/codeassist.py @@ -4,8 +4,15 @@ import rope.base.codeanalyze import rope.base.evaluate -from rope.base import pyobjects, pyobjectsdef, pynames, builtins, exceptions, worder -from rope.base.codeanalyze import SourceLinesAdapter +from rope.base import builtins +from rope.base import exceptions +from rope.base import libutils +from rope.base import pynames +from rope.base import pynamesdef +from rope.base import pyobjects +from rope.base import pyobjectsdef +from rope.base import pyscopes +from rope.base import worder from rope.contrib import fixsyntax from rope.refactor import functionutils @@ -53,9 +60,7 @@ def starting_offset(source_code, offset): def get_doc(project, source_code, offset, resource=None, maxfixes=1): """Get the pydoc""" - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() + fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes) pyname = fixer.pyname_at(offset) if pyname is None: return None @@ -88,9 +93,7 @@ def get_calltip(project, source_code, offset, resource=None, If `remove_self` is `True`, the first parameter whose name is self will be removed for methods. 
""" - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() + fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes) pyname = fixer.pyname_at(offset) if pyname is None: return None @@ -108,9 +111,7 @@ def get_definition_location(project, source_code, offset, location cannot be determined ``(None, None)`` is returned. """ - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() + fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes) pyname = fixer.pyname_at(offset) if pyname is not None: module, lineno = pyname.get_definition_location() @@ -126,6 +127,64 @@ def find_occurrences(*args, **kwds): return rope.contrib.findit.find_occurrences(*args, **kwds) +def get_canonical_path(project, resource, offset): + """Get the canonical path to an object. + + Given the offset of the object, this returns a list of + (name, name_type) tuples representing the canonical path to the + object. For example, the 'x' in the following code: + + class Foo(object): + def bar(self): + class Qux(object): + def mux(self, x): + pass + + we will return: + + [('Foo', 'CLASS'), ('bar', 'FUNCTION'), ('Qux', 'CLASS'), + ('mux', 'FUNCTION'), ('x', 'PARAMETER')] + + `resource` is a `rope.base.resources.Resource` object. + + `offset` is the offset of the pyname you want the path to. + + """ + # Retrieve the PyName. + pymod = project.get_pymodule(resource) + pyname = rope.base.evaluate.eval_location(pymod, offset) + + # Now get the location of the definition and its containing scope. + defmod, lineno = pyname.get_definition_location() + if not defmod: + return None + scope = defmod.get_scope().get_inner_scope_for_line(lineno) + + # Start with the name of the object we're interested in. 
+ names = [] + if isinstance(pyname, pynamesdef.ParameterName): + names = [(worder.get_name_at(pymod.get_resource(), offset), + 'PARAMETER') ] + elif isinstance(pyname, pynamesdef.AssignedName): + names = [(worder.get_name_at(pymod.get_resource(), offset), + 'VARIABLE')] + + # Collect scope names. + while scope.parent: + if isinstance(scope, pyscopes.FunctionScope): + scope_type = 'FUNCTION' + elif isinstance(scope, pyscopes.ClassScope): + scope_type = 'CLASS' + else: + scope_type = None + names.append((scope.pyobject.get_name(), scope_type)) + scope = scope.parent + + names.append((defmod.get_resource().real_path, 'MODULE')) + names.reverse() + return names + + class CompletionProposal(object): """A completion proposal @@ -184,15 +243,14 @@ def type(self): if isinstance(pyobject, builtins.BuiltinFunction): return 'function' elif isinstance(pyobject, builtins.BuiltinClass): - clsobj = pyobject.builtin return 'class' elif isinstance(pyobject, builtins.BuiltinObject) or \ - isinstance(pyobject, builtins.BuiltinName): + isinstance(pyobject, builtins.BuiltinName): return 'instance' elif isinstance(pyname, pynames.ImportedModule): return 'module' elif isinstance(pyname, pynames.ImportedName) or \ - isinstance(pyname, pynames.DefinedName): + isinstance(pyname, pynames.DefinedName): pyobject = pyname.get_object() if isinstance(pyobject, pyobjects.AbstractFunction): return 'function' @@ -222,7 +280,7 @@ def get_doc(self): @property def kind(self): - warnings.warn("the proposal's `kind` property is deprecated, " \ + warnings.warn("the proposal's `kind` property is deprecated, " "use `scope` instead") return self.scope @@ -294,7 +352,6 @@ class _PythonCodeAssist(object): def __init__(self, project, source_code, offset, resource=None, maxfixes=1, later_locals=True): self.project = project - self.pycore = self.project.pycore self.code = source_code self.resource = resource self.maxfixes = maxfixes @@ -309,7 +366,7 @@ def _find_starting_offset(self, source_code, offset): 
current_offset = offset - 1 while current_offset >= 0 and (source_code[current_offset].isalnum() or source_code[current_offset] in '_'): - current_offset -= 1; + current_offset -= 1 return current_offset + 1 def _matching_keywords(self, starting): @@ -339,11 +396,12 @@ def _dotted_completions(self, module_scope, holding_scope): compl_scope = 'imported' for name, pyname in element.get_attributes().items(): if name.startswith(self.starting): - result[name] = CompletionProposal(name, compl_scope, pyname) + result[name] = CompletionProposal(name, compl_scope, + pyname) return result def _undotted_completions(self, scope, result, lineno=None): - if scope.parent != None: + if scope.parent is not None: self._undotted_completions(scope.parent, result) if lineno is None: names = scope.get_propagated_names() @@ -388,7 +446,7 @@ def _is_defined_after(self, scope, pyname, lineno): def _code_completions(self): lineno = self.code.count('\n', 0, self.offset) + 1 - fixer = fixsyntax.FixSyntax(self.pycore, self.code, + fixer = fixsyntax.FixSyntax(self.project, self.code, self.resource, self.maxfixes) pymodule = fixer.get_pymodule() module_scope = pymodule.get_scope() @@ -413,24 +471,21 @@ def _keyword_parameters(self, pymodule, scope): if offset == 0: return {} word_finder = worder.Worder(self.code, True) - lines = SourceLinesAdapter(self.code) - lineno = lines.get_line_number(offset) if word_finder.is_on_function_call_keyword(offset - 1): - name_finder = rope.base.evaluate.ScopeNameFinder(pymodule) function_parens = word_finder.\ find_parens_start_from_inside(offset - 1) primary = word_finder.get_primary_at(function_parens - 1) try: function_pyname = rope.base.evaluate.\ eval_str(scope, primary) - except exceptions.BadIdentifierError, e: + except exceptions.BadIdentifierError: return {} if function_pyname is not None: pyobject = function_pyname.get_object() if isinstance(pyobject, pyobjects.AbstractFunction): pass elif isinstance(pyobject, pyobjects.AbstractClass) and \ - 
'__init__' in pyobject: + '__init__' in pyobject: pyobject = pyobject['__init__'].get_object() elif '__call__' in pyobject: pyobject = pyobject['__call__'].get_object() @@ -455,12 +510,12 @@ def __init__(self, code_assist_proposals, scopepref=None, typepref=None): self.proposals = code_assist_proposals if scopepref is None: scopepref = ['parameter_keyword', 'local', 'global', 'imported', - 'attribute', 'builtin', 'keyword'] + 'attribute', 'builtin', 'keyword'] self.scopepref = scopepref if typepref is None: typepref = ['class', 'function', 'instance', 'module', None] self.typerank = dict((type, index) - for index, type in enumerate(typepref)) + for index, type in enumerate(typepref)) def get_sorted_proposal_list(self): """Return a list of `CodeAssistProposal`""" @@ -471,7 +526,7 @@ def get_sorted_proposal_list(self): for scope in self.scopepref: scope_proposals = proposals.get(scope, []) scope_proposals = [proposal for proposal in scope_proposals - if proposal.type in self.typerank] + if proposal.type in self.typerank] scope_proposals.sort(self._proposal_cmp) result.extend(scope_proposals) return result @@ -526,7 +581,8 @@ def get_calltip(self, pyobject, ignore_unknown=False, remove_self=False): def _get_class_docstring(self, pyclass): contents = self._trim_docstring(pyclass.get_doc(), 2) supers = [super.get_name() for super in pyclass.get_superclasses()] - doc = 'class %s(%s):\n\n' % (pyclass.get_name(), ', '.join(supers)) + contents + doc = 'class %s(%s):\n\n' % (pyclass.get_name(), ', '.join(supers)) \ + + contents if '__init__' in pyclass: init = pyclass['__init__'].get_object() @@ -544,7 +600,7 @@ def _get_function_docstring(self, pyfunction): def _is_method(self, pyfunction): return isinstance(pyfunction, pyobjects.PyFunction) and \ - isinstance(pyfunction.parent, pyobjects.PyClass) + isinstance(pyfunction.parent, pyobjects.PyClass) def _get_single_function_docstring(self, pyfunction): signature = self._get_function_signature(pyfunction) @@ -579,7 +635,6 @@ 
def _location(self, pyobject, add_module=False): parent = parent.parent if add_module: if isinstance(pyobject, pyobjects.PyFunction): - module = pyobject.get_module() location.insert(0, self._get_module(pyobject)) if isinstance(parent, builtins.BuiltinModule): location.insert(0, parent.get_name() + '.') @@ -590,7 +645,7 @@ def _get_module(self, pyfunction): if module is not None: resource = module.get_resource() if resource is not None: - return pyfunction.pycore.modname(resource) + '.' + return libutils.modname(resource) + '.' return '' def _trim_docstring(self, docstring, indents=0): diff --git a/pymode/libs2/rope/contrib/finderrors.py b/pymode/libs2/rope/contrib/finderrors.py index c8cf7e15..9ee7dd15 100644 --- a/pymode/libs2/rope/contrib/finderrors.py +++ b/pymode/libs2/rope/contrib/finderrors.py @@ -31,7 +31,7 @@ def find_errors(project, resource): It returns a list of `Error`\s. """ - pymodule = project.pycore.resource_to_pyobject(resource) + pymodule = project.get_pymodule(resource) finder = _BadAccessFinder(pymodule) ast.walk(pymodule.get_ast(), finder) return finder.errors diff --git a/pymode/libs2/rope/contrib/findit.py b/pymode/libs2/rope/contrib/findit.py index e8ddd7e5..93eb01a8 100644 --- a/pymode/libs2/rope/contrib/findit.py +++ b/pymode/libs2/rope/contrib/findit.py @@ -7,7 +7,8 @@ def find_occurrences(project, resource, offset, unsure=False, resources=None, - in_hierarchy=False, task_handle=taskhandle.NullTaskHandle()): + in_hierarchy=False, + task_handle=taskhandle.NullTaskHandle()): """Return a list of `Location`\s If `unsure` is `True`, possible matches are returned, too. 
You @@ -18,16 +19,17 @@ def find_occurrences(project, resource, offset, unsure=False, resources=None, """ name = worder.get_name_at(resource, offset) - this_pymodule = project.pycore.resource_to_pyobject(resource) + this_pymodule = project.get_pymodule(resource) primary, pyname = rope.base.evaluate.eval_location2( this_pymodule, offset) + def is_match(occurrence): return unsure finder = occurrences.create_finder( - project.pycore, name, pyname, unsure=is_match, + project, name, pyname, unsure=is_match, in_hierarchy=in_hierarchy, instance=primary) if resources is None: - resources = project.pycore.get_python_files() + resources = project.get_python_files() job_set = task_handle.create_jobset('Finding Occurrences', count=len(resources)) return _find_locations(finder, resources, job_set) @@ -41,7 +43,7 @@ def find_implementations(project, resource, offset, resources=None, `Location`\s. """ name = worder.get_name_at(resource, offset) - this_pymodule = project.pycore.resource_to_pyobject(resource) + this_pymodule = project.get_pymodule(resource) pyname = rope.base.evaluate.eval_location(this_pymodule, offset) if pyname is not None: pyobject = pyname.get_object() @@ -50,17 +52,19 @@ def find_implementations(project, resource, offset, resources=None, raise exceptions.BadIdentifierError('Not a method!') else: raise exceptions.BadIdentifierError('Cannot resolve the identifier!') + def is_defined(occurrence): if not occurrence.is_defined(): return False + def not_self(occurrence): if occurrence.get_pyname().get_object() == pyname.get_object(): return False filters = [is_defined, not_self, occurrences.InHierarchyFilter(pyname, True)] - finder = occurrences.Finder(project.pycore, name, filters=filters) + finder = occurrences.Finder(project, name, filters=filters) if resources is None: - resources = project.pycore.get_python_files() + resources = project.get_python_files() job_set = task_handle.create_jobset('Finding Implementations', count=len(resources)) return 
_find_locations(finder, resources, job_set) @@ -72,19 +76,19 @@ def find_definition(project, code, offset, resource=None, maxfixes=1): A `Location` object is returned if the definition location can be determined, otherwise ``None`` is returned. """ - fixer = fixsyntax.FixSyntax(project.pycore, code, resource, maxfixes) - main_module = fixer.get_pymodule() + fixer = fixsyntax.FixSyntax(project, code, resource, maxfixes) pyname = fixer.pyname_at(offset) if pyname is not None: module, lineno = pyname.get_definition_location() name = rope.base.worder.Worder(code).get_word_at(offset) if lineno is not None: start = module.lines.get_line_start(lineno) + def check_offset(occurrence): if occurrence.offset < start: return False pyname_filter = occurrences.PyNameFilter(pyname) - finder = occurrences.Finder(project.pycore, name, + finder = occurrences.Finder(project, name, [check_offset, pyname_filter]) for occurrence in finder.find_occurrences(pymodule=module): return Location(occurrence) diff --git a/pymode/libs2/rope/contrib/fixmodnames.py b/pymode/libs2/rope/contrib/fixmodnames.py index 7092f131..d8bd3da1 100644 --- a/pymode/libs2/rope/contrib/fixmodnames.py +++ b/pymode/libs2/rope/contrib/fixmodnames.py @@ -15,7 +15,7 @@ argument. 
""" -from rope.base import change, taskhandle +from rope.base import taskhandle from rope.contrib import changestack from rope.refactor import rename @@ -57,7 +57,7 @@ def _count_fixes(self, fixer): return len(list(self._tobe_fixed(fixer))) def _tobe_fixed(self, fixer): - for resource in self.project.pycore.get_python_files(): + for resource in self.project.get_python_files(): modname = self._name(resource) if modname != fixer(modname): yield resource diff --git a/pymode/libs2/rope/contrib/fixsyntax.py b/pymode/libs2/rope/contrib/fixsyntax.py index 870046c8..aab5c78c 100644 --- a/pymode/libs2/rope/contrib/fixsyntax.py +++ b/pymode/libs2/rope/contrib/fixsyntax.py @@ -1,13 +1,16 @@ import rope.base.codeanalyze import rope.base.evaluate -from rope.base import worder, exceptions, utils +from rope.base import exceptions +from rope.base import libutils +from rope.base import utils +from rope.base import worder from rope.base.codeanalyze import ArrayLinesAdapter, LogicalLineFinder class FixSyntax(object): - def __init__(self, pycore, code, resource, maxfixes=1): - self.pycore = pycore + def __init__(self, project, code, resource, maxfixes=1): + self.project = project self.code = code self.resource = resource self.maxfixes = maxfixes @@ -22,10 +25,11 @@ def get_pymodule(self): try: if tries == 0 and self.resource is not None and \ self.resource.read() == code: - return self.pycore.resource_to_pyobject(self.resource, - force_errors=True) - return self.pycore.get_string_module( - code, resource=self.resource, force_errors=True) + return self.project.get_pymodule(self.resource, + force_errors=True) + return libutils.get_string_module( + self.project, code, resource=self.resource, + force_errors=True) except exceptions.ModuleSyntaxError, e: if msg is None: msg = '%s:%s %s' % (e.filename, e.lineno, e.message_) @@ -34,7 +38,9 @@ def get_pymodule(self): self.commenter.comment(e.lineno) code = '\n'.join(self.commenter.lines) else: - raise exceptions.ModuleSyntaxError(e.filename, 
e.lineno, msg) + raise exceptions.ModuleSyntaxError( + e.filename, e.lineno, + 'Failed to fix error: {}'.format(msg)) @property @utils.saveit @@ -43,6 +49,7 @@ def commenter(self): def pyname_at(self, offset): pymodule = self.get_pymodule() + def old_pyname(): word_finder = worder.Worder(self.code, True) expression = word_finder.get_primary_at(offset) @@ -51,6 +58,7 @@ def old_pyname(): scope = pymodule.get_scope().get_inner_scope_for_line(lineno) return rope.base.evaluate.eval_str(scope, expression) new_code = pymodule.source_code + def new_pyname(): newoffset = self.commenter.transfered_offset(offset) return rope.base.evaluate.eval_location(pymodule, newoffset) @@ -108,7 +116,6 @@ def _get_block_end(self, lineno): return end_line def _get_stmt_end(self, lineno): - end_line = lineno base_indents = _get_line_indents(self.lines[lineno]) for i in range(lineno + 1, len(self.lines)): if _get_line_indents(self.lines[i]) <= base_indents: @@ -117,7 +124,7 @@ def _get_stmt_end(self, lineno): def _fix_incomplete_try_blocks(self, lineno, indents): block_start = lineno - last_indents = current_indents = indents + last_indents = indents while block_start > 0: block_start = rope.base.codeanalyze.get_block_start( ArrayLinesAdapter(self.lines), block_start) - 1 @@ -155,6 +162,7 @@ def _insert(self, lineno, line): self.origs.insert(lineno, self.origs[lineno]) self.lines.insert(lineno, line) + def _logical_start(lines, lineno, check_prev=False): logical_finder = LogicalLineFinder(ArrayLinesAdapter(lines)) if check_prev: diff --git a/pymode/libs2/rope/contrib/generate.py b/pymode/libs2/rope/contrib/generate.py index 4d850da0..825f26d6 100644 --- a/pymode/libs2/rope/contrib/generate.py +++ b/pymode/libs2/rope/contrib/generate.py @@ -1,5 +1,7 @@ import rope.base.evaluate -from rope.base import change, pyobjects, exceptions, pynames, worder, codeanalyze +from rope.base import libutils +from rope.base import (change, pyobjects, exceptions, pynames, worder, + codeanalyze) from 
rope.refactor import sourceutils, importutils, functionutils, suites @@ -24,6 +26,7 @@ def create_module(project, name, sourcefolder=None): parent = parent.get_child(package) return parent.create_file(packages[-1] + '.py') + def create_package(project, name, sourcefolder=None): """Creates a package and returns a `rope.base.resources.Folder`""" if sourcefolder is None: @@ -55,14 +58,16 @@ def _check_exceptional_conditions(self): 'Element <%s> already exists.' % self.name) if not self.info.primary_is_found(): raise exceptions.RefactoringError( - 'Cannot determine the scope <%s> should be defined in.' % self.name) + 'Cannot determine the scope <%s> should be defined in.' % + self.name) def get_changes(self): changes = change.ChangeSet('Generate %s <%s>' % (self._get_element_kind(), self.name)) indents = self.info.get_scope_indents() blanks = self.info.get_blank_lines() - base_definition = sourceutils.fix_indentation(self._get_element(), indents) + base_definition = sourceutils.fix_indentation(self._get_element(), + indents) definition = '\n' * blanks[0] + base_definition + '\n' * blanks[1] resource = self.info.get_insertion_resource() @@ -130,18 +135,19 @@ class GenerateModule(_Generate): def get_changes(self): package = self.info.get_package() changes = change.ChangeSet('Generate Module <%s>' % self.name) - new_resource = self.project.get_file('%s/%s.py' % (package.path, self.name)) + new_resource = self.project.get_file('%s/%s.py' % + (package.path, self.name)) if new_resource.exists(): raise exceptions.RefactoringError( 'Module <%s> already exists' % new_resource.path) changes.add_change(change.CreateResource(new_resource)) changes.add_change(_add_import_to_module( - self.project.pycore, self.resource, new_resource)) + self.project, self.resource, new_resource)) return changes def get_location(self): package = self.info.get_package() - return (package.get_child('%s.py' % self.name) , 1) + return (package.get_child('%s.py' % self.name), 1) class 
GeneratePackage(_Generate): @@ -149,13 +155,14 @@ class GeneratePackage(_Generate): def get_changes(self): package = self.info.get_package() changes = change.ChangeSet('Generate Package <%s>' % self.name) - new_resource = self.project.get_folder('%s/%s' % (package.path, self.name)) + new_resource = self.project.get_folder('%s/%s' % + (package.path, self.name)) if new_resource.exists(): raise exceptions.RefactoringError( 'Package <%s> already exists' % new_resource.path) changes.add_change(change.CreateResource(new_resource)) changes.add_change(_add_import_to_module( - self.project.pycore, self.resource, new_resource)) + self.project, self.resource, new_resource)) child = self.project.get_folder(package.path + '/' + self.name) changes.add_change(change.CreateFile(child, '__init__.py')) return changes @@ -163,14 +170,14 @@ def get_changes(self): def get_location(self): package = self.info.get_package() child = package.get_child(self.name) - return (child.get_child('__init__.py') , 1) + return (child.get_child('__init__.py'), 1) -def _add_import_to_module(pycore, resource, imported): - pymodule = pycore.resource_to_pyobject(resource) - import_tools = importutils.ImportTools(pycore) +def _add_import_to_module(project, resource, imported): + pymodule = project.get_pymodule(resource) + import_tools = importutils.ImportTools(project) module_imports = import_tools.module_imports(pymodule) - module_name = pycore.modname(imported) + module_name = libutils.modname(imported) new_import = importutils.NormalImport(((module_name, None), )) module_imports.add_import(new_import) return change.ChangeContents(resource, module_imports.get_changed_source()) @@ -182,7 +189,7 @@ def __init__(self, pycore, resource, offset): self.pycore = pycore self.resource = resource self.offset = offset - self.source_pymodule = self.pycore.resource_to_pyobject(resource) + self.source_pymodule = self.pycore.project.get_pymodule(resource) finder = rope.base.evaluate.ScopeNameFinder(self.source_pymodule) 
self.primary, self.pyname = finder.get_primary_and_pyname_at(offset) self._init_fields() @@ -264,7 +271,7 @@ def get_blank_lines(self): def get_package(self): primary = self.primary if self.primary is None: - return self.pycore.get_source_folders()[0] + return self.pycore.project.get_source_folders()[0] if isinstance(primary.get_object(), pyobjects.PyPackage): return primary.get_object().get_resource() raise exceptions.RefactoringError( @@ -304,15 +311,15 @@ def element_already_exists(self): def is_static_method(self): return self.primary is not None and \ - isinstance(self.primary.get_object(), pyobjects.PyClass) + isinstance(self.primary.get_object(), pyobjects.PyClass) def is_method(self): return self.primary is not None and \ - isinstance(self.primary.get_object().get_type(), pyobjects.PyClass) + isinstance(self.primary.get_object().get_type(), pyobjects.PyClass) def is_constructor(self): return self.pyname is not None and \ - isinstance(self.pyname.get_object(), pyobjects.PyClass) + isinstance(self.pyname.get_object(), pyobjects.PyClass) def is_instance(self): if self.pyname is None: diff --git a/pymode/libs2/rope/refactor/__init__.py b/pymode/libs2/rope/refactor/__init__.py index 10d734c3..4ef67513 100644 --- a/pymode/libs2/rope/refactor/__init__.py +++ b/pymode/libs2/rope/refactor/__init__.py @@ -45,8 +45,8 @@ monitoring the progress of refactorings. 
""" -from rope.refactor.importutils import ImportOrganizer -from rope.refactor.topackage import ModuleToPackage +from rope.refactor.importutils import ImportOrganizer # noqa +from rope.refactor.topackage import ModuleToPackage # noqa __all__ = ['rename', 'move', 'inline', 'extract', 'restructure', 'topackage', diff --git a/pymode/libs2/rope/refactor/change_signature.py b/pymode/libs2/rope/refactor/change_signature.py index a8c50d71..4279d9cf 100644 --- a/pymode/libs2/rope/refactor/change_signature.py +++ b/pymode/libs2/rope/refactor/change_signature.py @@ -1,7 +1,12 @@ import copy import rope.base.exceptions -from rope.base import pyobjects, taskhandle, evaluate, worder, codeanalyze, utils +from rope.base import codeanalyze +from rope.base import evaluate +from rope.base import pyobjects +from rope.base import taskhandle +from rope.base import utils +from rope.base import worder from rope.base.change import ChangeContents, ChangeSet from rope.refactor import occurrences, functionutils @@ -9,7 +14,7 @@ class ChangeSignature(object): def __init__(self, project, resource, offset): - self.pycore = project.pycore + self.project = project self.resource = resource self.offset = offset self._set_name_and_pyname() @@ -20,7 +25,7 @@ def __init__(self, project, resource, offset): def _set_name_and_pyname(self): self.name = worder.get_name_at(self.resource, self.offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) + this_pymodule = self.project.get_pymodule(self.resource) self.primary, self.pyname = evaluate.eval_location2( this_pymodule, self.offset) if self.pyname is None: @@ -42,21 +47,21 @@ def _set_name_and_pyname(self): def _change_calls(self, call_changer, in_hierarchy=None, resources=None, handle=taskhandle.NullTaskHandle()): if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() changes = ChangeSet('Changing signature of <%s>' % self.name) job_set = handle.create_jobset('Collecting 
Changes', len(resources)) finder = occurrences.create_finder( - self.pycore, self.name, self.pyname, instance=self.primary, + self.project, self.name, self.pyname, instance=self.primary, in_hierarchy=in_hierarchy and self.is_method()) if self.others: name, pyname = self.others constructor_finder = occurrences.create_finder( - self.pycore, name, pyname, only_calls=True) + self.project, name, pyname, only_calls=True) finder = _MultipleFinders([finder, constructor_finder]) for file in resources: job_set.started_job(file.path) change_calls = _ChangeCallsInModule( - self.pycore, finder, file, call_changer) + self.project, finder, file, call_changer) changed_file = change_calls.get_changed_module() if changed_file is not None: changes.add_change(ChangeContents(file, changed_file)) @@ -160,12 +165,15 @@ def change_definition(self, call): def change_call(self, primary, pyname, call): call_info = functionutils.CallInfo.read( primary, pyname, self.definition_info, call) - mapping = functionutils.ArgumentMapping(self.definition_info, call_info) + mapping = functionutils.ArgumentMapping(self.definition_info, + call_info) - for definition_info, changer in zip(self.changed_definition_infos, self.changers): + for definition_info, changer in zip(self.changed_definition_infos, + self.changers): changer.change_argument_mapping(definition_info, mapping) - return mapping.to_call_info(self.changed_definition_infos[-1]).to_string() + return mapping.to_call_info( + self.changed_definition_infos[-1]).to_string() class _ArgumentChanger(object): @@ -190,12 +198,14 @@ def change_definition_info(self, call_info): if self.index < len(call_info.args_with_defaults): del call_info.args_with_defaults[self.index] elif self.index == len(call_info.args_with_defaults) and \ - call_info.args_arg is not None: + call_info.args_arg is not None: call_info.args_arg = None elif (self.index == len(call_info.args_with_defaults) and - call_info.args_arg is None and call_info.keywords_arg is not None) or \ - 
(self.index == len(call_info.args_with_defaults) + 1 and - call_info.args_arg is not None and call_info.keywords_arg is not None): + call_info.args_arg is None and + call_info.keywords_arg is not None) or \ + (self.index == len(call_info.args_with_defaults) + 1 and + call_info.args_arg is not None and + call_info.keywords_arg is not None): call_info.keywords_arg = None def change_argument_mapping(self, definition_info, mapping): @@ -282,8 +292,8 @@ def change_definition_info(self, definition_info): class _ChangeCallsInModule(object): - def __init__(self, pycore, occurrence_finder, resource, call_changer): - self.pycore = pycore + def __init__(self, project, occurrence_finder, resource, call_changer): + self.project = project self.occurrence_finder = occurrence_finder self.resource = resource self.call_changer = call_changer @@ -291,11 +301,13 @@ def __init__(self, pycore, occurrence_finder, resource, call_changer): def get_changed_module(self): word_finder = worder.Worder(self.source) change_collector = codeanalyze.ChangeCollector(self.source) - for occurrence in self.occurrence_finder.find_occurrences(self.resource): + for occurrence in self.occurrence_finder.find_occurrences( + self.resource): if not occurrence.is_called() and not occurrence.is_defined(): continue start, end = occurrence.get_primary_range() - begin_parens, end_parens = word_finder.get_word_parens_range(end - 1) + begin_parens, end_parens = word_finder.\ + get_word_parens_range(end - 1) if occurrence.is_called(): primary, pyname = occurrence.get_primary_and_pyname() changed_call = self.call_changer.change_call( @@ -310,7 +322,7 @@ def get_changed_module(self): @property @utils.saveit def pymodule(self): - return self.pycore.resource_to_pyobject(self.resource) + return self.project.get_pymodule(self.resource) @property @utils.saveit diff --git a/pymode/libs2/rope/refactor/encapsulate_field.py b/pymode/libs2/rope/refactor/encapsulate_field.py index af8d3ccf..32cb7a95 100644 --- 
a/pymode/libs2/rope/refactor/encapsulate_field.py +++ b/pymode/libs2/rope/refactor/encapsulate_field.py @@ -1,4 +1,10 @@ -from rope.base import pynames, taskhandle, evaluate, exceptions, worder, utils +from rope.base import evaluate +from rope.base import exceptions +from rope.base import libutils +from rope.base import pynames +from rope.base import taskhandle +from rope.base import utils +from rope.base import worder from rope.base.change import ChangeSet, ChangeContents from rope.refactor import sourceutils, occurrences @@ -6,9 +12,9 @@ class EncapsulateField(object): def __init__(self, project, resource, offset): - self.pycore = project.pycore + self.project = project self.name = worder.get_name_at(resource, offset) - this_pymodule = self.pycore.resource_to_pyobject(resource) + this_pymodule = self.project.get_pymodule(resource) self.pyname = evaluate.eval_location(this_pymodule, offset) if not self._is_an_attribute(self.pyname): raise exceptions.RefactoringError( @@ -30,7 +36,7 @@ def get_changes(self, getter=None, setter=None, resources=None, """ if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() changes = ChangeSet('Encapsulate field <%s>' % self.name) job_set = task_handle.create_jobset('Collecting Changes', len(resources)) @@ -39,7 +45,7 @@ def get_changes(self, getter=None, setter=None, resources=None, if setter is None: setter = 'set_' + self.name renamer = GetterSetterRenameInModule( - self.pycore, self.name, self.pyname, getter, setter) + self.project, self.name, self.pyname, getter, setter) for file in resources: job_set.started_job(file.path) if file == self.resource: @@ -61,7 +67,7 @@ def _is_an_attribute(self, pyname): if pyname is not None and isinstance(pyname, pynames.AssignedName): pymodule, lineno = self.pyname.get_definition_location() scope = pymodule.get_scope().\ - get_inner_scope_for_line(lineno) + get_inner_scope_for_line(lineno) if scope.get_kind() == 'Class': return pyname 
in scope.get_names().values() parent = scope.parent @@ -80,7 +86,7 @@ def _get_defining_scope(self): return pymodule.get_scope().get_inner_scope_for_line(line) def _change_holding_module(self, changes, renamer, getter, setter): - pymodule = self.pycore.resource_to_pyobject(self.resource) + pymodule = self.project.get_pymodule(self.resource) class_scope = self._get_defining_class_scope() defining_object = self._get_defining_scope().pyobject start, end = sourceutils.get_body_region(defining_object) @@ -88,10 +94,11 @@ def _change_holding_module(self, changes, renamer, getter, setter): new_source = renamer.get_changed_module(pymodule=pymodule, skip_start=start, skip_end=end) if new_source is not None: - pymodule = self.pycore.get_string_module(new_source, self.resource) + pymodule = libutils.get_string_module( + self.project, new_source, self.resource) class_scope = pymodule.get_scope().\ - get_inner_scope_for_line(class_scope.get_start()) - indents = sourceutils.get_indent(self.pycore) * ' ' + get_inner_scope_for_line(class_scope.get_start()) + indents = sourceutils.get_indent(self.project) * ' ' getter = 'def %s(self):\n%sreturn self.%s' % \ (getter, indents, self.name) setter = 'def %s(self, value):\n%sself.%s = value' % \ @@ -103,10 +110,10 @@ def _change_holding_module(self, changes, renamer, getter, setter): class GetterSetterRenameInModule(object): - def __init__(self, pycore, name, pyname, getter, setter): - self.pycore = pycore + def __init__(self, project, name, pyname, getter, setter): + self.project = project self.name = name - self.finder = occurrences.create_finder(pycore, name, pyname) + self.finder = occurrences.create_finder(project, name, pyname) self.getter = getter self.setter = setter @@ -120,7 +127,7 @@ def get_changed_module(self, resource=None, pymodule=None, class _FindChangesForModule(object): def __init__(self, finder, resource, pymodule, skip_start, skip_end): - self.pycore = finder.pycore + self.project = finder.project self.finder = 
finder.finder self.getter = finder.getter self.setter = finder.setter @@ -155,7 +162,7 @@ def get_changed_module(self): + ' %s ' % assignment_type[:-1]) current_line = self.lines.get_line_number(start) start_line, end_line = self.pymodule.logical_lines.\ - logical_line_in(current_line) + logical_line_in(current_line) self.last_set = self.lines.get_line_end(end_line) end = self.source.index('=', end) + 1 self.set_index = len(result) @@ -193,7 +200,7 @@ def source(self): @utils.saveit def lines(self): if self.pymodule is None: - self.pymodule = self.pycore.resource_to_pyobject(self.resource) + self.pymodule = self.project.get_pymodule(self.resource) return self.pymodule.lines @property diff --git a/pymode/libs2/rope/refactor/extract.py b/pymode/libs2/rope/refactor/extract.py index 3e7a619c..be541bb5 100644 --- a/pymode/libs2/rope/refactor/extract.py +++ b/pymode/libs2/rope/refactor/extract.py @@ -12,7 +12,7 @@ # # _ExtractInfo: holds information about the refactoring; it is passed # to the parts that need to have information about the refactoring -# +# # _ExtractCollector: merely saves all of the information necessary for # performing the refactoring. 
# @@ -36,7 +36,6 @@ class _ExtractRefactoring(object): def __init__(self, project, resource, start_offset, end_offset, variable=False): self.project = project - self.pycore = project.pycore self.resource = resource self.start_offset = self._fix_start(resource.read(), start_offset) self.end_offset = self._fix_end(resource.read(), end_offset) @@ -95,9 +94,9 @@ class _ExtractInfo(object): def __init__(self, project, resource, start, end, new_name, variable, similar, make_global): - self.pycore = project.pycore + self.project = project self.resource = resource - self.pymodule = self.pycore.resource_to_pyobject(resource) + self.pymodule = project.get_pymodule(resource) self.global_scope = self.pymodule.get_scope() self.source = self.pymodule.source_code self.lines = self.pymodule.lines @@ -153,8 +152,8 @@ def _choose_closest_line_end(self, offset, end=False): @property def one_line(self): return self.region != self.lines_region and \ - (self.logical_lines.logical_line_in(self.region_lines[0]) == - self.logical_lines.logical_line_in(self.region_lines[1])) + (self.logical_lines.logical_line_in(self.region_lines[0]) == + self.logical_lines.logical_line_in(self.region_lines[1])) @property def global_(self): @@ -163,7 +162,7 @@ def global_(self): @property def method(self): return self.scope.parent is not None and \ - self.scope.parent.get_kind() == 'Class' + self.scope.parent.get_kind() == 'Class' @property def indents(self): @@ -182,6 +181,7 @@ def extracted(self): return self.source[self.region[0]:self.region[1]] _returned = None + @property def returned(self): """Does the extracted piece contain return statement""" @@ -273,7 +273,8 @@ def _where_to_search(self): if self.info.variable: return [self.info.scope_region] else: - return [self.info._get_scope_region(self.info.scope.parent)] + return [self.info._get_scope_region( + self.info.scope.parent)] else: return [self.info.region] @@ -391,8 +392,9 @@ def multi_line_conditions(self, info): 'contain complete statements.') 
def _is_region_on_a_word(self, info): - if info.region[0] > 0 and self._is_on_a_word(info, info.region[0] - 1) or \ - self._is_on_a_word(info, info.region[1] - 1): + if info.region[0] > 0 and \ + self._is_on_a_word(info, info.region[0] - 1) or \ + self._is_on_a_word(info, info.region[1] - 1): return True def _is_on_a_word(self, info, offset): @@ -436,7 +438,7 @@ def _get_body(self): return result def _find_temps(self): - return usefunction.find_temps(self.info.pycore.project, + return usefunction.find_temps(self.info.project, self._get_body()) def get_checks(self): @@ -468,7 +470,7 @@ def _get_function_definition(self): result.append('@staticmethod\n') result.append('def %s:\n' % self._get_function_signature(args)) unindented_body = self._get_unindented_function_body(returns) - indents = sourceutils.get_indent(self.info.pycore) + indents = sourceutils.get_indent(self.info.project) function_body = sourceutils.indent_lines(unindented_body, indents) result.append(function_body) definition = ''.join(result) @@ -487,11 +489,11 @@ def _get_function_signature(self, args): args.remove(self_name) args.insert(0, self_name) return prefix + self.info.new_name + \ - '(%s)' % self._get_comma_form(args) + '(%s)' % self._get_comma_form(args) def _extracting_method(self): return self.info.method and not self.info.make_global and \ - _get_function_kind(self.info.scope) == 'method' + _get_function_kind(self.info.scope) == 'method' def _get_self_name(self): param_names = self.info.scope.pyobject.get_param_names() @@ -503,7 +505,7 @@ def _get_function_call(self, args): if self.info.method and not self.info.make_global: if _get_function_kind(self.info.scope) == 'method': self_name = self._get_self_name() - if self_name in args: + if self_name in args: args.remove(self_name) prefix = self_name + '.' 
else: @@ -557,7 +559,7 @@ def _find_function_returns(self): if self.info.one_line or self.info.returned: return [] written = self.info_collector.written | \ - self.info_collector.maybe_written + self.info_collector.maybe_written return list(written & self.info_collector.postread) def _get_unindented_function_body(self, returns): @@ -577,7 +579,7 @@ def __init__(self, info): def get_definition(self): result = self.info.new_name + ' = ' + \ - _join_lines(self.info.extracted) + '\n' + _join_lines(self.info.extracted) + '\n' return result def get_body_pattern(self): @@ -671,7 +673,6 @@ def _For(self, node): self._handle_conditional_node(node) - def _get_argnames(arguments): result = [node.id for node in arguments.args if isinstance(node, ast.Name)] @@ -770,6 +771,7 @@ def has_errors(code): ast.walk(node, visitor) return visitor.error + def _get_function_kind(scope): return scope.pyobject.get_kind() @@ -779,6 +781,7 @@ def _parse_text(body): node = ast.parse(body) return node + def _join_lines(code): lines = [] for line in code.splitlines(): diff --git a/pymode/libs2/rope/refactor/functionutils.py b/pymode/libs2/rope/refactor/functionutils.py index a653b9db..58baf917 100644 --- a/pymode/libs2/rope/refactor/functionutils.py +++ b/pymode/libs2/rope/refactor/functionutils.py @@ -32,9 +32,6 @@ def arguments_to_string(self, from_index=0): @staticmethod def _read(pyfunction, code): - scope = pyfunction.get_scope() - parent = scope.parent - parameter_names = pyfunction.get_param_names() kind = pyfunction.get_kind() is_method = kind == 'method' is_lambda = kind == 'lambda' @@ -89,7 +86,8 @@ def to_string(self): if self.args[start:]: params.extend(self.args[start:]) if self.keywords: - params.extend(['%s=%s' % (name, value) for name, value in self.keywords]) + params.extend(['%s=%s' % (name, value) + for name, value in self.keywords]) if self.args_arg is not None: params.append('*' + self.args_arg) if self.keywords_arg: @@ -120,15 +118,15 @@ def read(primary, pyname, 
definition_info, code): @staticmethod def _is_method_call(primary, pyname): return primary is not None and \ - isinstance(primary.get_object().get_type(), - rope.base.pyobjects.PyClass) and \ - CallInfo._is_method(pyname) + isinstance(primary.get_object().get_type(), + rope.base.pyobjects.PyClass) and \ + CallInfo._is_method(pyname) @staticmethod def _is_class(pyname): return pyname is not None and \ - isinstance(pyname.get_object(), - rope.base.pyobjects.PyClass) + isinstance(pyname.get_object(), + rope.base.pyobjects.PyClass) @staticmethod def _is_method(pyname): @@ -184,7 +182,8 @@ def to_call_info(self, definition_info): keywords.extend(self.keyword_args) return CallInfo(self.call_info.function_name, args, keywords, self.call_info.args_arg, self.call_info.keywords_arg, - self.call_info.implicit_arg, self.call_info.constructor) + self.call_info.implicit_arg, + self.call_info.constructor) class _FunctionParser(object): @@ -197,7 +196,8 @@ def __init__(self, call, implicit_arg, is_lambda=False): self.last_parens = self.call.rindex(':') else: self.last_parens = self.call.rindex(')') - self.first_parens = self.word_finder._find_parens_start(self.last_parens) + self.first_parens = self.word_finder._find_parens_start( + self.last_parens) def get_parameters(self): args, keywords = self.word_finder.get_parameters(self.first_parens, diff --git a/pymode/libs2/rope/refactor/importutils/__init__.py b/pymode/libs2/rope/refactor/importutils/__init__.py index 2a86edb0..4871faf3 100644 --- a/pymode/libs2/rope/refactor/importutils/__init__.py +++ b/pymode/libs2/rope/refactor/importutils/__init__.py @@ -5,6 +5,7 @@ """ import rope.base.evaluate +from rope.base import libutils from rope.base.change import ChangeSet, ChangeContents from rope.refactor import occurrences, rename from rope.refactor.importutils import module_imports, actions @@ -21,8 +22,7 @@ class ImportOrganizer(object): def __init__(self, project): self.project = project - self.pycore = project.pycore - 
self.import_tools = ImportTools(self.pycore) + self.import_tools = ImportTools(self.project) def organize_imports(self, resource, offset=None): return self._perform_command_on_import_tools( @@ -45,7 +45,7 @@ def handle_long_imports(self, resource, offset=None): self.import_tools.handle_long_imports, resource, offset) def _perform_command_on_import_tools(self, method, resource, offset): - pymodule = self.pycore.resource_to_pyobject(resource) + pymodule = self.project.get_pymodule(resource) before_performing = pymodule.source_code import_filter = None if offset is not None: @@ -66,26 +66,26 @@ def import_filter(import_stmt): class ImportTools(object): - def __init__(self, pycore): - self.pycore = pycore + def __init__(self, project): + self.project = project def get_import(self, resource): """The import statement for `resource`""" - module_name = self.pycore.modname(resource) + module_name = libutils.modname(resource) return NormalImport(((module_name, None), )) def get_from_import(self, resource, name): """The from import statement for `name` in `resource`""" - module_name = self.pycore.modname(resource) + module_name = libutils.modname(resource) names = [] if isinstance(name, list): names = [(imported, None) for imported in name] else: - names = [(name, None),] + names = [(name, None), ] return FromImport(module_name, 0, tuple(names)) def module_imports(self, module, imports_filter=None): - return module_imports.ModuleImports(self.pycore, module, + return module_imports.ModuleImports(self.project, module, imports_filter) def froms_to_imports(self, pymodule, import_filter=None): @@ -103,7 +103,8 @@ def froms_to_imports(self, pymodule, import_filter=None): if not import_stmt.readonly and \ self._is_transformable_to_normal(import_stmt.import_info): import_stmt.import_info = \ - NormalImport(((import_stmt.import_info.module_name, None),)) + NormalImport(((import_stmt.import_info.module_name, + None),)) module_imports.remove_duplicates() return 
module_imports.get_changed_source() @@ -121,12 +122,13 @@ def _from_to_normal(self, pymodule, import_stmt): if alias is not None: imported = alias occurrence_finder = occurrences.create_finder( - self.pycore, imported, pymodule[imported], imports=False) + self.project, imported, pymodule[imported], imports=False) source = rename.rename_in_module( occurrence_finder, module_name + '.' + name, pymodule=pymodule, replace_primary=True) if source is not None: - pymodule = self.pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module( + self.project, source, resource) return pymodule def _clean_up_imports(self, pymodule, import_filter): @@ -135,17 +137,20 @@ def _clean_up_imports(self, pymodule, import_filter): module_with_imports.expand_stars() source = module_with_imports.get_changed_source() if source is not None: - pymodule = self.pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module( + self.project, source, resource) source = self.relatives_to_absolutes(pymodule) if source is not None: - pymodule = self.pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module( + self.project, source, resource) module_with_imports = self.module_imports(pymodule, import_filter) module_with_imports.remove_duplicates() module_with_imports.remove_unused_imports() source = module_with_imports.get_changed_source() if source is not None: - pymodule = self.pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module( + self.project, source, resource) return pymodule def relatives_to_absolutes(self, pymodule, import_filter=None): @@ -172,12 +177,14 @@ def organize_imports(self, pymodule, module_imports = self.module_imports(pymodule, import_filter) if unused: module_imports.remove_unused_imports() + if self.project.prefs.get("split_imports"): + module_imports.force_single_imports() if duplicates: module_imports.remove_duplicates() source = module_imports.get_changed_source() if source is not 
None: - pymodule = self.pycore.get_string_module( - source, pymodule.get_resource()) + pymodule = libutils.get_string_module( + self.project, source, pymodule.get_resource()) if selfs: pymodule = self._remove_self_imports(pymodule, import_filter) if sort: @@ -187,10 +194,12 @@ def organize_imports(self, pymodule, def _remove_self_imports(self, pymodule, import_filter=None): module_imports = self.module_imports(pymodule, import_filter) - to_be_fixed, to_be_renamed = module_imports.get_self_import_fix_and_rename_list() + to_be_fixed, to_be_renamed = \ + module_imports.get_self_import_fix_and_rename_list() for name in to_be_fixed: try: - pymodule = self._rename_in_module(pymodule, name, '', till_dot=True) + pymodule = self._rename_in_module(pymodule, name, '', + till_dot=True) except ValueError: # There is a self import with direct access to it return pymodule @@ -200,16 +209,18 @@ def _remove_self_imports(self, pymodule, import_filter=None): module_imports.get_self_import_fix_and_rename_list() source = module_imports.get_changed_source() if source is not None: - pymodule = self.pycore.get_string_module(source, pymodule.get_resource()) + pymodule = libutils.get_string_module( + self.project, source, pymodule.get_resource()) return pymodule def _rename_in_module(self, pymodule, name, new_name, till_dot=False): old_name = name.split('.')[-1] old_pyname = rope.base.evaluate.eval_str(pymodule.get_scope(), name) occurrence_finder = occurrences.create_finder( - self.pycore, old_name, old_pyname, imports=False) + self.project, old_name, old_pyname, imports=False) changes = rope.base.codeanalyze.ChangeCollector(pymodule.source_code) - for occurrence in occurrence_finder.find_occurrences(pymodule=pymodule): + for occurrence in occurrence_finder.find_occurrences( + pymodule=pymodule): start, end = occurrence.get_primary_range() if till_dot: new_end = pymodule.source_code.index('.', end) + 1 @@ -222,7 +233,8 @@ def _rename_in_module(self, pymodule, name, new_name, 
till_dot=False): changes.add_change(start, end, new_name) source = changes.get_changed() if source is not None: - pymodule = self.pycore.get_string_module(source, pymodule.get_resource()) + pymodule = libutils.get_string_module( + self.project, source, pymodule.get_resource()) return pymodule def sort_imports(self, pymodule, import_filter=None): @@ -237,8 +249,8 @@ def handle_long_imports(self, pymodule, maxdots=2, maxlength=27, module_imports = self.module_imports(pymodule, import_filter) to_be_fixed = module_imports.handle_long_imports(maxdots, maxlength) # performing the renaming - pymodule = self.pycore.get_string_module( - module_imports.get_changed_source(), + pymodule = libutils.get_string_module( + self.project, module_imports.get_changed_source(), resource=pymodule.get_resource()) for name in to_be_fixed: pymodule = self._rename_in_module(pymodule, name, @@ -248,22 +260,22 @@ def handle_long_imports(self, pymodule, maxdots=2, maxlength=27, import_filter=import_filter) -def get_imports(pycore, pydefined): +def get_imports(project, pydefined): """A shortcut for getting the `ImportInfo`\s used in a scope""" pymodule = pydefined.get_module() - module = module_imports.ModuleImports(pycore, pymodule) + module = module_imports.ModuleImports(project, pymodule) if pymodule == pydefined: return [stmt.import_info for stmt in module.imports] return module.get_used_imports(pydefined) -def get_module_imports(pycore, pymodule): +def get_module_imports(project, pymodule): """A shortcut for creating a `module_imports.ModuleImports` object""" - return module_imports.ModuleImports(pycore, pymodule) + return module_imports.ModuleImports(project, pymodule) -def add_import(pycore, pymodule, module_name, name=None): - imports = get_module_imports(pycore, pymodule) +def add_import(project, pymodule, module_name, name=None): + imports = get_module_imports(project, pymodule) candidates = [] names = [] # from mod import name @@ -288,7 +300,7 @@ def add_import(pycore, pymodule, 
module_name, name=None): candidates.append(normal_import) - visitor = actions.AddingVisitor(pycore, candidates) + visitor = actions.AddingVisitor(project, candidates) selected_import = normal_import for import_statement in imports.imports: if import_statement.accept(visitor): diff --git a/pymode/libs2/rope/refactor/importutils/actions.py b/pymode/libs2/rope/refactor/importutils/actions.py index 4851d02f..fd0f7054 100644 --- a/pymode/libs2/rope/refactor/importutils/actions.py +++ b/pymode/libs2/rope/refactor/importutils/actions.py @@ -1,6 +1,4 @@ -import os -import sys - +from rope.base import libutils from rope.base import pyobjects, exceptions, stdmods from rope.refactor import occurrences from rope.refactor.importutils import importinfo @@ -28,24 +26,25 @@ def visitFromImport(self, import_stmt, import_info): class RelativeToAbsoluteVisitor(ImportInfoVisitor): - def __init__(self, pycore, current_folder): + def __init__(self, project, current_folder): self.to_be_absolute = [] - self.pycore = pycore + self.project = project self.folder = current_folder - self.context = importinfo.ImportContext(pycore, current_folder) + self.context = importinfo.ImportContext(project, current_folder) def visitNormalImport(self, import_stmt, import_info): - self.to_be_absolute.extend(self._get_relative_to_absolute_list(import_info)) + self.to_be_absolute.extend( + self._get_relative_to_absolute_list(import_info)) new_pairs = [] for name, alias in import_info.names_and_aliases: - resource = self.pycore.find_module(name, folder=self.folder) + resource = self.project.find_module(name, folder=self.folder) if resource is None: new_pairs.append((name, alias)) continue - absolute_name = self.pycore.modname(resource) + absolute_name = libutils.modname(resource) new_pairs.append((absolute_name, alias)) if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): + new_pairs, import_info.names_and_aliases): import_stmt.import_info = 
importinfo.NormalImport(new_pairs) def _get_relative_to_absolute_list(self, import_info): @@ -53,10 +52,10 @@ def _get_relative_to_absolute_list(self, import_info): for name, alias in import_info.names_and_aliases: if alias is not None: continue - resource = self.pycore.find_module(name, folder=self.folder) + resource = self.project.find_module(name, folder=self.folder) if resource is None: continue - absolute_name = self.pycore.modname(resource) + absolute_name = libutils.modname(resource) if absolute_name != name: result.append((name, absolute_name)) return result @@ -65,7 +64,7 @@ def visitFromImport(self, import_stmt, import_info): resource = import_info.get_imported_resource(self.context) if resource is None: return None - absolute_name = self.pycore.modname(resource) + absolute_name = libutils.modname(resource) if import_info.module_name != absolute_name: import_stmt.import_info = importinfo.FromImport( absolute_name, 0, import_info.names_and_aliases) @@ -73,11 +72,11 @@ def visitFromImport(self, import_stmt, import_info): class FilteringVisitor(ImportInfoVisitor): - def __init__(self, pycore, folder, can_select): + def __init__(self, project, folder, can_select): self.to_be_absolute = [] - self.pycore = pycore + self.project = project self.can_select = self._transform_can_select(can_select) - self.context = importinfo.ImportContext(pycore, folder) + self.context = importinfo.ImportContext(project, folder) def _transform_can_select(self, can_select): def can_select_name_and_alias(name, alias): @@ -113,10 +112,10 @@ def visitFromImport(self, import_stmt, import_info): class RemovingVisitor(ImportInfoVisitor): - def __init__(self, pycore, folder, can_select): + def __init__(self, project, folder, can_select): self.to_be_absolute = [] - self.pycore = pycore - self.filtering = FilteringVisitor(pycore, folder, can_select) + self.project = project + self.filtering = FilteringVisitor(project, folder, can_select) def dispatch(self, import_): result = 
self.filtering.dispatch(import_) @@ -133,8 +132,8 @@ class AddingVisitor(ImportInfoVisitor): """ - def __init__(self, pycore, import_list): - self.pycore = pycore + def __init__(self, project, import_list): + self.project = project self.import_list = import_list self.import_info = None @@ -162,7 +161,8 @@ def visitNormalImport(self, import_stmt, import_info): # Multiple imports using a single import statement is discouraged # so we won't bother adding them. if self.import_info._are_name_and_alias_lists_equal( - import_info.names_and_aliases, self.import_info.names_and_aliases): + import_info.names_and_aliases, + self.import_info.names_and_aliases): return True def visitFromImport(self, import_stmt, import_info): @@ -174,6 +174,9 @@ def visitFromImport(self, import_stmt, import_info): if self.import_info.is_star_import(): import_stmt.import_info = self.import_info return True + if self.project.prefs.get("split_imports"): + return self.import_info.names_and_aliases == \ + import_info.names_and_aliases new_pairs = list(import_info.names_and_aliases) for pair in self.import_info.names_and_aliases: if pair not in new_pairs: @@ -185,10 +188,10 @@ def visitFromImport(self, import_stmt, import_info): class ExpandStarsVisitor(ImportInfoVisitor): - def __init__(self, pycore, folder, can_select): - self.pycore = pycore - self.filtering = FilteringVisitor(pycore, folder, can_select) - self.context = importinfo.ImportContext(pycore, folder) + def __init__(self, project, folder, can_select): + self.project = project + self.filtering = FilteringVisitor(project, folder, can_select) + self.context = importinfo.ImportContext(project, folder) def visitNormalImport(self, import_stmt, import_info): self.filtering.dispatch(import_stmt) @@ -208,18 +211,18 @@ def visitFromImport(self, import_stmt, import_info): class SelfImportVisitor(ImportInfoVisitor): - def __init__(self, pycore, current_folder, resource): - self.pycore = pycore + def __init__(self, project, current_folder, resource): 
+ self.project = project self.folder = current_folder self.resource = resource self.to_be_fixed = set() self.to_be_renamed = set() - self.context = importinfo.ImportContext(pycore, current_folder) + self.context = importinfo.ImportContext(project, current_folder) def visitNormalImport(self, import_stmt, import_info): new_pairs = [] for name, alias in import_info.names_and_aliases: - resource = self.pycore.find_module(name, folder=self.folder) + resource = self.project.find_module(name, folder=self.folder) if resource is not None and resource == self.resource: imported = name if alias is not None: @@ -228,7 +231,7 @@ def visitNormalImport(self, import_stmt, import_info): else: new_pairs.append((name, alias)) if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): + new_pairs, import_info.names_and_aliases): import_stmt.import_info = importinfo.NormalImport(new_pairs) def visitFromImport(self, import_stmt, import_info): @@ -238,7 +241,7 @@ def visitFromImport(self, import_stmt, import_info): if resource == self.resource: self._importing_names_from_self(import_info, import_stmt) return - pymodule = self.pycore.resource_to_pyobject(resource) + pymodule = self.project.get_pymodule(resource) new_pairs = [] for name, alias in import_info.names_and_aliases: try: @@ -254,7 +257,7 @@ def visitFromImport(self, import_stmt, import_info): except exceptions.AttributeNotFoundError: new_pairs.append((name, alias)) if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): + new_pairs, import_info.names_and_aliases): import_stmt.import_info = importinfo.FromImport( import_info.module_name, import_info.level, new_pairs) @@ -268,19 +271,19 @@ def _importing_names_from_self(self, import_info, import_stmt): class SortingVisitor(ImportInfoVisitor): - def __init__(self, pycore, current_folder): - self.pycore = pycore + def __init__(self, project, current_folder): + self.project = project self.folder = 
current_folder self.standard = set() self.third_party = set() self.in_project = set() self.future = set() - self.context = importinfo.ImportContext(pycore, current_folder) + self.context = importinfo.ImportContext(project, current_folder) def visitNormalImport(self, import_stmt, import_info): if import_info.names_and_aliases: name, alias = import_info.names_and_aliases[0] - resource = self.pycore.find_module( + resource = self.project.find_module( name, folder=self.folder) self._check_imported_resource(import_stmt, resource, name) @@ -291,7 +294,7 @@ def visitFromImport(self, import_stmt, import_info): def _check_imported_resource(self, import_stmt, resource, imported_name): info = import_stmt.import_info - if resource is not None and resource.project == self.pycore.project: + if resource is not None and resource.project == self.project: self.in_project.add(import_stmt) elif _is_future(info): self.future.add(import_stmt) @@ -303,16 +306,15 @@ def _check_imported_resource(self, import_stmt, resource, imported_name): class LongImportVisitor(ImportInfoVisitor): - def __init__(self, current_folder, pycore, maxdots, maxlength): + def __init__(self, current_folder, project, maxdots, maxlength): self.maxdots = maxdots self.maxlength = maxlength self.to_be_renamed = set() self.current_folder = current_folder - self.pycore = pycore + self.project = project self.new_imports = [] def visitNormalImport(self, import_stmt, import_info): - new_pairs = [] for name, alias in import_info.names_and_aliases: if alias is None and self._is_long(name): self.to_be_renamed.add(name) @@ -324,15 +326,15 @@ def visitNormalImport(self, import_stmt, import_info): def _is_long(self, name): return name.count('.') > self.maxdots or \ - ('.' in name and len(name) > self.maxlength) + ('.' 
in name and len(name) > self.maxlength) class RemovePyNameVisitor(ImportInfoVisitor): - def __init__(self, pycore, pymodule, pyname, folder): + def __init__(self, project, pymodule, pyname, folder): self.pymodule = pymodule self.pyname = pyname - self.context = importinfo.ImportContext(pycore, folder) + self.context = importinfo.ImportContext(project, folder) def visitFromImport(self, import_stmt, import_info): new_pairs = [] @@ -356,4 +358,4 @@ def dispatch(self, import_): def _is_future(info): return isinstance(info, importinfo.FromImport) and \ - info.module_name == '__future__' + info.module_name == '__future__' diff --git a/pymode/libs2/rope/refactor/importutils/importinfo.py b/pymode/libs2/rope/refactor/importutils/importinfo.py index 25c8e813..114080aa 100644 --- a/pymode/libs2/rope/refactor/importutils/importinfo.py +++ b/pymode/libs2/rope/refactor/importutils/importinfo.py @@ -84,7 +84,7 @@ def _are_name_and_alias_lists_equal(self, list1, list2): def __eq__(self, obj): return isinstance(obj, self.__class__) and \ - self.get_import_statement() == obj.get_import_statement() + self.get_import_statement() == obj.get_import_statement() def __ne__(self, obj): return not self.__eq__(obj) @@ -147,10 +147,10 @@ def get_imported_resource(self, context): Returns `None` if module was not found. """ if self.level == 0: - return context.pycore.find_module( + return context.project.find_module( self.module_name, folder=context.folder) else: - return context.pycore.find_relative_module( + return context.project.find_relative_module( self.module_name, context.folder, self.level) def get_imported_module(self, context): @@ -160,10 +160,10 @@ def get_imported_module(self, context): could not be found. 
""" if self.level == 0: - return context.pycore.get_module( + return context.project.get_module( self.module_name, context.folder) else: - return context.pycore.get_relative_module( + return context.project.get_relative_module( self.module_name, context.folder, self.level) def get_import_statement(self): @@ -180,7 +180,7 @@ def is_empty(self): def is_star_import(self): return len(self.names_and_aliases) > 0 and \ - self.names_and_aliases[0][0] == '*' + self.names_and_aliases[0][0] == '*' class EmptyImport(ImportInfo): @@ -196,6 +196,6 @@ def get_imported_primaries(self, context): class ImportContext(object): - def __init__(self, pycore, folder): - self.pycore = pycore + def __init__(self, project, folder): + self.project = project self.folder = folder diff --git a/pymode/libs2/rope/refactor/importutils/module_imports.py b/pymode/libs2/rope/refactor/importutils/module_imports.py index 874213f2..b96eebc4 100644 --- a/pymode/libs2/rope/refactor/importutils/module_imports.py +++ b/pymode/libs2/rope/refactor/importutils/module_imports.py @@ -1,13 +1,14 @@ -import rope.base.pynames -from rope.base import ast, utils -from rope.refactor.importutils import importinfo +from rope.base import ast +from rope.base import pynames +from rope.base import utils from rope.refactor.importutils import actions +from rope.refactor.importutils import importinfo class ModuleImports(object): - def __init__(self, pycore, pymodule, import_filter=None): - self.pycore = pycore + def __init__(self, project, pymodule, import_filter=None): + self.project = project self.pymodule = pymodule self.separating_lines = 0 self.filter = import_filter @@ -15,7 +16,7 @@ def __init__(self, pycore, pymodule, import_filter=None): @property @utils.saveit def imports(self): - finder = _GlobalImportFinder(self.pymodule, self.pycore) + finder = _GlobalImportFinder(self.pymodule) result = finder.find_import_statements() self.separating_lines = finder.get_separating_line_count() if self.filter is not None: @@ -32,15 
+33,16 @@ def _get_unbound_names(self, defined_pyobject): def remove_unused_imports(self): can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule)) visitor = actions.RemovingVisitor( - self.pycore, self._current_folder(), can_select) + self.project, self._current_folder(), can_select) for import_statement in self.imports: import_statement.accept(visitor) def get_used_imports(self, defined_pyobject): result = [] - can_select = _OneTimeSelector(self._get_unbound_names(defined_pyobject)) + can_select = _OneTimeSelector( + self._get_unbound_names(defined_pyobject)) visitor = actions.FilteringVisitor( - self.pycore, self._current_folder(), can_select) + self.project, self._current_folder(), can_select) for import_statement in self.imports: new_import = import_statement.accept(visitor) if new_import is not None and not new_import.is_empty(): @@ -48,11 +50,18 @@ def get_used_imports(self, defined_pyobject): return result def get_changed_source(self): - imports = self.imports - after_removing = self._remove_imports(imports) - imports = [stmt for stmt in imports + # Make sure we forward a removed import's preceding blank + # lines count to the following import statement. + prev_stmt = None + for stmt in self.imports: + if prev_stmt is not None and prev_stmt.import_info.is_empty(): + stmt.blank_lines = max(prev_stmt.blank_lines, stmt.blank_lines) + prev_stmt = stmt + # The new list of imports. 
+ imports = [stmt for stmt in self.imports if not stmt.import_info.is_empty()] + after_removing = self._remove_imports(self.imports) first_non_blank = self._first_non_blank_line(after_removing, 0) first_import = self._first_import_line() - 1 result = [] @@ -61,7 +70,6 @@ def get_changed_source(self): # Writing imports sorted_imports = sorted(imports, self._compare_import_locations) for stmt in sorted_imports: - start = self._get_import_location(stmt) if stmt != sorted_imports[0]: result.append('\n' * stmt.blank_lines) result.append(stmt.get_import_statement() + '\n') @@ -111,7 +119,7 @@ def _first_non_blank_line(self, lines, lineno): return result def add_import(self, import_info): - visitor = actions.AddingVisitor(self.pycore, [import_info]) + visitor = actions.AddingVisitor(self.project, [import_info]) for import_statement in self.imports: if import_statement.accept(visitor): break @@ -132,21 +140,21 @@ def _get_new_import_lineno(self): def filter_names(self, can_select): visitor = actions.RemovingVisitor( - self.pycore, self._current_folder(), can_select) + self.project, self._current_folder(), can_select) for import_statement in self.imports: import_statement.accept(visitor) def expand_stars(self): can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule)) visitor = actions.ExpandStarsVisitor( - self.pycore, self._current_folder(), can_select) + self.project, self._current_folder(), can_select) for import_statement in self.imports: import_statement.accept(visitor) def remove_duplicates(self): added_imports = [] for import_stmt in self.imports: - visitor = actions.AddingVisitor(self.pycore, + visitor = actions.AddingVisitor(self.project, [import_stmt.import_info]) for added_import in added_imports: if added_import.accept(visitor): @@ -154,17 +162,34 @@ def remove_duplicates(self): else: added_imports.append(import_stmt) + def force_single_imports(self): + """force a single import per statement""" + for import_stmt in self.imports[:]: + import_info = 
import_stmt.import_info + if import_info.is_empty(): + continue + if len(import_info.names_and_aliases) > 1: + for name_and_alias in import_info.names_and_aliases: + if hasattr(import_info, "module_name"): + new_import = importinfo.FromImport( + import_info.module_name, import_info.level, + [name_and_alias]) + else: + new_import = importinfo.NormalImport([name_and_alias]) + self.add_import(new_import) + import_stmt.empty_import() + def get_relative_to_absolute_list(self): - visitor = rope.refactor.importutils.actions.RelativeToAbsoluteVisitor( - self.pycore, self._current_folder()) + visitor = actions.RelativeToAbsoluteVisitor( + self.project, self._current_folder()) for import_stmt in self.imports: if not import_stmt.readonly: import_stmt.accept(visitor) return visitor.to_be_absolute def get_self_import_fix_and_rename_list(self): - visitor = rope.refactor.importutils.actions.SelfImportVisitor( - self.pycore, self._current_folder(), self.pymodule.get_resource()) + visitor = actions.SelfImportVisitor( + self.project, self._current_folder(), self.pymodule.get_resource()) for import_stmt in self.imports: if not import_stmt.readonly: import_stmt.accept(visitor) @@ -174,15 +199,19 @@ def _current_folder(self): return self.pymodule.get_resource().parent def sort_imports(self): + if self.project.prefs.get("sort_imports_alphabetically"): + sort_kwargs = dict(key=self._get_import_name) + else: + sort_kwargs = dict(cmp=self._compare_imports) + # IDEA: Sort from import list - visitor = actions.SortingVisitor(self.pycore, self._current_folder()) + visitor = actions.SortingVisitor(self.project, self._current_folder()) for import_statement in self.imports: import_statement.accept(visitor) - in_projects = sorted(visitor.in_project, self._compare_imports) - third_party = sorted(visitor.third_party, self._compare_imports) - standards = sorted(visitor.standard, self._compare_imports) - future = sorted(visitor.future, self._compare_imports) - blank_lines = 0 + in_projects = 
sorted(visitor.in_project, **sort_kwargs) + third_party = sorted(visitor.third_party, **sort_kwargs) + standards = sorted(visitor.standard, **sort_kwargs) + future = sorted(visitor.future, **sort_kwargs) last_index = self._first_import_line() last_index = self._move_imports(future, last_index, 0) last_index = self._move_imports(standards, last_index, 1) @@ -208,6 +237,14 @@ def _first_import_line(self): break return lineno + def _get_import_name(self, import_stmt): + import_info = import_stmt.import_info + if hasattr(import_info, "module_name"): + return "%s.%s" % (import_info.module_name, + import_info.names_and_aliases[0][0]) + else: + return import_info.names_and_aliases[0][0] + def _compare_imports(self, stmt1, stmt2): str1 = stmt1.get_import_statement() str2 = stmt2.get_import_statement() @@ -229,7 +266,7 @@ def _move_imports(self, imports, index, blank_lines): def handle_long_imports(self, maxdots, maxlength): visitor = actions.LongImportVisitor( - self._current_folder(), self.pycore, maxdots, maxlength) + self._current_folder(), self.project, maxdots, maxlength) for import_statement in self.imports: if not import_statement.readonly: import_statement.accept(visitor) @@ -239,7 +276,7 @@ def handle_long_imports(self, maxdots, maxlength): def remove_pyname(self, pyname): """Removes pyname when imported in ``from mod import x``""" - visitor = actions.RemovePyNameVisitor(self.pycore, self.pymodule, + visitor = actions.RemovePyNameVisitor(self.project, self.pymodule, pyname, self._current_folder()) for import_stmt in self.imports: import_stmt.accept(visitor) @@ -277,7 +314,7 @@ def __init__(self, pyobject): def _visit_child_scope(self, node): pyobject = self.pyobject.get_module().get_scope().\ - get_inner_scope_for_line(node.lineno).pyobject + get_inner_scope_for_line(node.lineno).pyobject visitor = _LocalUnboundNameFinder(pyobject, self) for child in ast.get_child_nodes(node): ast.walk(child, visitor) @@ -324,8 +361,8 @@ def __init__(self, pymodule, 
wanted_pyobject): self.unbound = set() self.names = set() for name, pyname in pymodule._get_structural_attributes().items(): - if not isinstance(pyname, (rope.base.pynames.ImportedName, - rope.base.pynames.ImportedModule)): + if not isinstance(pyname, (pynames.ImportedName, + pynames.ImportedModule)): self.names.add(name) wanted_scope = wanted_pyobject.get_scope() self.start = wanted_scope.get_start() @@ -374,12 +411,11 @@ def add_unbound(self, name): class _GlobalImportFinder(object): - def __init__(self, pymodule, pycore): + def __init__(self, pymodule): self.current_folder = None if pymodule.get_resource(): self.current_folder = pymodule.get_resource().parent self.pymodule = pymodule - self.pycore = pycore self.imports = [] self.pymodule = pymodule self.lines = self.pymodule.lines @@ -428,13 +464,14 @@ def visit_from(self, node, end_line): if node.level: level = node.level import_info = importinfo.FromImport( - node.module or '', # see comment at rope.base.ast.walk + node.module or '', # see comment at rope.base.ast.walk level, self._get_names(node.names)) start_line = node.lineno self.imports.append(importinfo.ImportStatement( import_info, node.lineno, end_line, self._get_text(start_line, end_line), - blank_lines=self._count_empty_lines_before(start_line))) + blank_lines= + self._count_empty_lines_before(start_line))) def _get_names(self, alias_names): result = [] diff --git a/pymode/libs2/rope/refactor/inline.py b/pymode/libs2/rope/refactor/inline.py index cfd64a7e..0ae1f8f4 100644 --- a/pymode/libs2/rope/refactor/inline.py +++ b/pymode/libs2/rope/refactor/inline.py @@ -21,17 +21,19 @@ import rope.base.exceptions import rope.refactor.functionutils from rope.base import (pynames, pyobjects, codeanalyze, - taskhandle, evaluate, worder, utils) + taskhandle, evaluate, worder, utils, libutils) from rope.base.change import ChangeSet, ChangeContents from rope.refactor import (occurrences, rename, sourceutils, importutils, move, change_signature) + def 
unique_prefix(): n = 0 while True: yield "__" + str(n) + "__" n += 1 + def create_inline(project, resource, offset): """Create a refactoring object for inlining @@ -39,8 +41,7 @@ def create_inline(project, resource, offset): `InlineMethod`, `InlineVariable` or `InlineParameter`. """ - pycore = project.pycore - pyname = _get_pyname(pycore, resource, offset) + pyname = _get_pyname(project, resource, offset) message = 'Inline refactoring should be performed on ' \ 'a method, local variable or parameter.' if pyname is None: @@ -61,9 +62,8 @@ class _Inliner(object): def __init__(self, project, resource, offset): self.project = project - self.pycore = project.pycore - self.pyname = _get_pyname(self.pycore, resource, offset) - range_finder = worder.Worder(resource.read()) + self.pyname = _get_pyname(project, resource, offset) + range_finder = worder.Worder(resource.read(), True) self.region = range_finder.get_primary_range(offset) self.name = range_finder.get_word_at(offset) self.offset = offset @@ -84,7 +84,7 @@ def __init__(self, *args, **kwds): self.pymodule = self.pyfunction.get_module() self.resource = self.pyfunction.get_module().get_resource() self.occurrence_finder = occurrences.create_finder( - self.pycore, self.name, self.pyname) + self.project, self.name, self.pyname) self.normal_generator = _DefinitionGenerator(self.project, self.pyfunction) self._init_imports() @@ -92,7 +92,7 @@ def __init__(self, *args, **kwds): def _init_imports(self): body = sourceutils.get_body(self.pyfunction) body, imports = move.moving_code_with_imports( - self.pycore, self.resource, body) + self.project, self.resource, body) self.imports = imports self.others_generator = _DefinitionGenerator( self.project, self.pyfunction, body=body) @@ -100,7 +100,6 @@ def _init_imports(self): def _get_scope_range(self): scope = self.pyfunction.get_scope() lines = self.pymodule.lines - logicals = self.pymodule.logical_lines start_line = scope.get_start() if self.pyfunction.decorators: decorators = 
self.pyfunction.decorators @@ -121,7 +120,7 @@ def get_changes(self, remove=True, only_current=False, resources=None, """ changes = ChangeSet('Inline method <%s>' % self.name) if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() if only_current: resources = [self.original] if remove: @@ -132,20 +131,20 @@ def get_changes(self, remove=True, only_current=False, resources=None, job_set.started_job(file.path) if file == self.resource: changes.add_change(self._defining_file_changes( - changes, remove=remove, only_current=only_current)) + changes, remove=remove, only_current=only_current)) else: aim = None if only_current and self.original == file: aim = self.offset handle = _InlineFunctionCallsForModuleHandle( - self.pycore, file, self.others_generator, aim) + self.project, file, self.others_generator, aim) result = move.ModuleSkipRenamer( self.occurrence_finder, file, handle).get_changed_module() if result is not None: - result = _add_imports(self.pycore, result, + result = _add_imports(self.project, result, file, self.imports) if remove: - result = _remove_from(self.pycore, self.pyname, + result = _remove_from(self.project, self.pyname, result, file) changes.add_change(ChangeContents(file, result)) job_set.finished_job() @@ -154,8 +153,6 @@ def get_changes(self, remove=True, only_current=False, resources=None, def _get_removed_range(self): scope = self.pyfunction.get_scope() lines = self.pymodule.lines - logical = self.pymodule.logical_lines - start_line = scope.get_start() start, end = self._get_scope_range() end_line = scope.get_end() for i in range(end_line + 1, lines.length()): @@ -177,7 +174,7 @@ def _defining_file_changes(self, changes, remove, only_current): # we don't want to change any of them aim = len(self.resource.read()) + 100 handle = _InlineFunctionCallsForModuleHandle( - self.pycore, self.resource, + self.project, self.resource, self.normal_generator, aim_offset=aim) replacement = None if 
remove: @@ -200,7 +197,6 @@ def _is_the_last_method_of_a_class(self): return False class_start, class_end = sourceutils.get_body_region(pyclass) source = self.pymodule.source_code - lines = self.pymodule.lines func_start, func_end = self._get_scope_range() if source[class_start:func_start].strip() == '' and \ source[func_end:class_end].strip() == '': @@ -226,12 +222,12 @@ def _check_exceptional_conditions(self): 'Local variable should be assigned once for inlining.') def get_changes(self, remove=True, only_current=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): + docs=False, task_handle=taskhandle.NullTaskHandle()): if resources is None: if rename._is_local(self.pyname): resources = [self.resource] else: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() if only_current: resources = [self.original] if remove and self.original != self.resource: @@ -243,28 +239,29 @@ def get_changes(self, remove=True, only_current=False, resources=None, for resource in resources: jobset.started_job(resource.path) if resource == self.resource: - source = self._change_main_module(remove, only_current) + source = self._change_main_module(remove, only_current, docs) changes.add_change(ChangeContents(self.resource, source)) else: result = self._change_module(resource, remove, only_current) if result is not None: - result = _add_imports(self.pycore, result, + result = _add_imports(self.project, result, resource, self.imports) changes.add_change(ChangeContents(resource, result)) jobset.finished_job() return changes - def _change_main_module(self, remove, only_current): + def _change_main_module(self, remove, only_current, docs): region = None if only_current and self.original == self.resource: region = self.region - return _inline_variable(self.pycore, self.pymodule, self.pyname, - self.name, remove=remove, region=region) + return _inline_variable(self.project, self.pymodule, self.pyname, + self.name, remove=remove, region=region, + 
docs=docs) def _init_imports(self): vardef = _getvardef(self.pymodule, self.pyname) self.imported, self.imports = move.moving_code_with_imports( - self.pycore, self.resource, vardef) + self.project, self.resource, vardef) def _change_module(self, resource, remove, only_current): filters = [occurrences.NoImportsFilter(), @@ -275,11 +272,12 @@ def check_aim(occurrence): if self.offset < start or end < self.offset: return False filters.insert(0, check_aim) - finder = occurrences.Finder(self.pycore, self.name, filters=filters) + finder = occurrences.Finder(self.project, self.name, filters=filters) changed = rename.rename_in_module( finder, self.imported, resource=resource, replace_primary=True) if changed and remove: - changed = _remove_from(self.pycore, self.pyname, changed, resource) + changed = _remove_from(self.project, self.pyname, + changed, resource) return changed def get_kind(self): @@ -329,8 +327,9 @@ def _join_lines(lines): class _DefinitionGenerator(object): unique_prefix = unique_prefix() + def __init__(self, project, pyfunction, body=None): - self.pycore = project.pycore + self.project = project self.pyfunction = pyfunction self.pymodule = pyfunction.get_module() self.resource = self.pymodule.get_resource() @@ -360,10 +359,11 @@ def _get_definition_params(self): def get_function_name(self): return self.pyfunction.get_name() - def get_definition(self, primary, pyname, call, host_vars=[],returns=False): + def get_definition(self, primary, pyname, call, host_vars=[], + returns=False): # caching already calculated definitions return self._calculate_definition(primary, pyname, call, - host_vars, returns) + host_vars, returns) def _calculate_header(self, primary, pyname, call): # A header is created which initializes parameters @@ -377,10 +377,6 @@ def _calculate_header(self, primary, pyname, call): paramdict[param_name] = value header = '' to_be_inlined = [] - mod = self.pycore.get_string_module(self.body) - all_names = mod.get_scope().get_names() - 
assigned_names = [name for name in all_names if - isinstance(all_names[name], rope.base.pynamesdef.AssignedName)] for name, value in paramdict.items(): if name != value and value is not None: header += name + ' = ' + value.replace('\n', ' ') + '\n' @@ -392,32 +388,36 @@ def _calculate_definition(self, primary, pyname, call, host_vars, returns): header, to_be_inlined = self._calculate_header(primary, pyname, call) source = header + self.body - mod = self.pycore.get_string_module(source) + mod = libutils.get_string_module(self.project, source) name_dict = mod.get_scope().get_names() - all_names = [x for x in name_dict if - not isinstance(name_dict[x], rope.base.builtins.BuiltinName)] + all_names = [x for x in name_dict if + not isinstance(name_dict[x], + rope.base.builtins.BuiltinName)] # If there is a name conflict, all variable names # inside the inlined function are renamed if len(set(all_names).intersection(set(host_vars))) > 0: prefix = _DefinitionGenerator.unique_prefix.next() - guest = self.pycore.get_string_module(source, self.resource) + guest = libutils.get_string_module(self.project, source, + self.resource) - to_be_inlined = [prefix+item for item in to_be_inlined] + to_be_inlined = [prefix + item for item in to_be_inlined] for item in all_names: pyname = guest[item] - occurrence_finder = occurrences.create_finder( - self.pycore, item, pyname) + occurrence_finder = occurrences.create_finder(self.project, + item, pyname) source = rename.rename_in_module(occurrence_finder, - prefix+item, pymodule=guest) - guest = self.pycore.get_string_module(source, self.resource) + prefix + item, pymodule=guest) + guest = libutils.get_string_module( + self.project, source, self.resource) #parameters not reassigned inside the functions are now inlined. 
for name in to_be_inlined: - pymodule = self.pycore.get_string_module(source, self.resource) + pymodule = libutils.get_string_module( + self.project, source, self.resource) pyname = pymodule[name] - source = _inline_variable(self.pycore, pymodule, pyname, name) + source = _inline_variable(self.project, pymodule, pyname, name) return self._replace_returns_with(source, returns) @@ -425,19 +425,22 @@ def _replace_returns_with(self, source, returns): result = [] returned = None last_changed = 0 - for match in _DefinitionGenerator._get_return_pattern().finditer(source): + for match in _DefinitionGenerator._get_return_pattern().finditer( + source): for key, value in match.groupdict().items(): if value and key == 'return': result.append(source[last_changed:match.start('return')]) if returns: self._check_nothing_after_return(source, match.end('return')) + beg_idx = match.end('return') returned = _join_lines( - source[match.end('return'): len(source)].splitlines()) + source[beg_idx:len(source)].splitlines()) last_changed = len(source) else: current = match.end('return') - while current < len(source) and source[current] in ' \t': + while current < len(source) and \ + source[current] in ' \t': current += 1 last_changed = current if current == len(source) or source[current] == '\n': @@ -452,7 +455,8 @@ def _check_nothing_after_return(self, source, offset): lineno = logical_lines.logical_line_in(lineno)[1] if source[lines.get_line_end(lineno):len(source)].strip() != '': raise rope.base.exceptions.RefactoringError( - 'Cannot inline functions with statements after return statement.') + 'Cannot inline functions with statements ' + + 'after return statement.') @classmethod def _get_return_pattern(cls): @@ -471,7 +475,7 @@ def named_pattern(name, list_): class _InlineFunctionCallsForModuleHandle(object): - def __init__(self, pycore, resource, + def __init__(self, project, resource, definition_generator, aim_offset=None): """Inlines occurrences @@ -479,7 +483,7 @@ def __init__(self, 
pycore, resource, `aim` offset will be inlined. """ - self.pycore = pycore + self.project = project self.generator = definition_generator self.resource = resource self.aim = aim_offset @@ -504,24 +508,24 @@ def occurred_outside_skip(self, change_collector, occurrence): end_parens = self._find_end_parens(self.source, end - 1) lineno = self.lines.get_line_number(start) start_line, end_line = self.pymodule.logical_lines.\ - logical_line_in(lineno) + logical_line_in(lineno) line_start = self.lines.get_line_start(start_line) line_end = self.lines.get_line_end(end_line) - returns = self.source[line_start:start].strip() != '' or \ - self.source[end_parens:line_end].strip() != '' + self.source[end_parens:line_end].strip() != '' indents = sourceutils.get_indents(self.lines, start_line) primary, pyname = occurrence.get_primary_and_pyname() - host = self.pycore.resource_to_pyobject(self.resource) + host = self.pymodule scope = host.scope.get_inner_scope_for_line(lineno) definition, returned = self.generator.get_definition( - primary, pyname, self.source[start:end_parens], scope.get_names(), returns=returns) + primary, pyname, self.source[start:end_parens], scope.get_names(), + returns=returns) end = min(line_end + 1, len(self.source)) - change_collector.add_change(line_start, end, - sourceutils.fix_indentation(definition, indents)) + change_collector.add_change( + line_start, end, sourceutils.fix_indentation(definition, indents)) if returns: name = returned if name is None: @@ -537,7 +541,7 @@ def _find_end_parens(self, source, offset): @property @utils.saveit def pymodule(self): - return self.pycore.resource_to_pyobject(self.resource) + return self.project.get_pymodule(self.resource) @property @utils.saveit @@ -553,12 +557,13 @@ def lines(self): return self.pymodule.lines -def _inline_variable(pycore, pymodule, pyname, name, - remove=True, region=None): +def _inline_variable(project, pymodule, pyname, name, + remove=True, region=None, docs=False): definition = 
_getvardef(pymodule, pyname) start, end = _assigned_lineno(pymodule, pyname) - occurrence_finder = occurrences.create_finder(pycore, name, pyname) + occurrence_finder = occurrences.create_finder(project, name, pyname, + docs=docs) changed_source = rename.rename_in_module( occurrence_finder, definition, pymodule=pymodule, replace_primary=True, writes=False, region=region) @@ -567,11 +572,12 @@ def _inline_variable(pycore, pymodule, pyname, name, if remove: lines = codeanalyze.SourceLinesAdapter(changed_source) source = changed_source[:lines.get_line_start(start)] + \ - changed_source[lines.get_line_end(end) + 1:] + changed_source[lines.get_line_end(end) + 1:] else: source = changed_source return source + def _getvardef(pymodule, pyname): assignment = pyname.assignments[0] lines = pymodule.lines @@ -581,35 +587,39 @@ def _getvardef(pymodule, pyname): if assignment.levels: raise rope.base.exceptions.RefactoringError( 'Cannot inline tuple assignments.') - definition = definition_with_assignment[definition_with_assignment.\ + definition = definition_with_assignment[definition_with_assignment. 
index('=') + 1:].strip() return definition + def _assigned_lineno(pymodule, pyname): definition_line = pyname.assignments[0].ast_node.lineno return pymodule.logical_lines.logical_line_in(definition_line) -def _add_imports(pycore, source, resource, imports): + +def _add_imports(project, source, resource, imports): if not imports: return source - pymodule = pycore.get_string_module(source, resource) - module_import = importutils.get_module_imports(pycore, pymodule) + pymodule = libutils.get_string_module(project, source, resource) + module_import = importutils.get_module_imports(project, pymodule) for import_info in imports: module_import.add_import(import_info) source = module_import.get_changed_source() - pymodule = pycore.get_string_module(source, resource) - import_tools = importutils.ImportTools(pycore) + pymodule = libutils.get_string_module(project, source, resource) + import_tools = importutils.ImportTools(project) return import_tools.organize_imports(pymodule, unused=False, sort=False) -def _get_pyname(pycore, resource, offset): - pymodule = pycore.resource_to_pyobject(resource) + +def _get_pyname(project, resource, offset): + pymodule = project.get_pymodule(resource) pyname = evaluate.eval_location(pymodule, offset) if isinstance(pyname, pynames.ImportedName): pyname = pyname._get_imported_pyname() return pyname -def _remove_from(pycore, pyname, source, resource): - pymodule = pycore.get_string_module(source, resource) - module_import = importutils.get_module_imports(pycore, pymodule) + +def _remove_from(project, pyname, source, resource): + pymodule = libutils.get_string_module(project, source, resource) + module_import = importutils.get_module_imports(project, pymodule) module_import.remove_pyname(pyname) return module_import.get_changed_source() diff --git a/pymode/libs2/rope/refactor/introduce_factory.py b/pymode/libs2/rope/refactor/introduce_factory.py index 5a885587..7532e361 100644 --- a/pymode/libs2/rope/refactor/introduce_factory.py +++ 
b/pymode/libs2/rope/refactor/introduce_factory.py @@ -1,5 +1,6 @@ import rope.base.exceptions import rope.base.pyobjects +from rope.base import libutils from rope.base import taskhandle, evaluate from rope.base.change import (ChangeSet, ChangeContents) from rope.refactor import rename, occurrences, sourceutils, importutils @@ -8,13 +9,14 @@ class IntroduceFactory(object): def __init__(self, project, resource, offset): - self.pycore = project.pycore + self.project = project self.offset = offset - this_pymodule = self.pycore.resource_to_pyobject(resource) + this_pymodule = self.project.get_pymodule(resource) self.old_pyname = evaluate.eval_location(this_pymodule, offset) - if self.old_pyname is None or not isinstance(self.old_pyname.get_object(), - rope.base.pyobjects.PyClass): + if self.old_pyname is None or \ + not isinstance(self.old_pyname.get_object(), + rope.base.pyobjects.PyClass): raise rope.base.exceptions.RefactoringError( 'Introduce factory should be performed on a class.') self.old_name = self.old_pyname.get_object().get_name() @@ -35,7 +37,7 @@ def get_changes(self, factory_name, global_factory=False, resources=None, """ if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() changes = ChangeSet('Introduce factory method <%s>' % factory_name) job_set = task_handle.create_jobset('Collecting Changes', len(resources)) @@ -64,11 +66,11 @@ def _change_module(self, resources, changes, global_) if changed_code is not None: if global_: - new_pymodule = self.pycore.get_string_module(changed_code, - self.resource) - modname = self.pycore.modname(self.resource) + new_pymodule = libutils.get_string_module( + self.project, changed_code, self.resource) + modname = libutils.modname(self.resource) changed_code, imported = importutils.add_import( - self.pycore, new_pymodule, modname, factory_name) + self.project, new_pymodule, modname, factory_name) changed_code = changed_code.replace(replacement, imported) 
changes.add_change(ChangeContents(file_, changed_code)) job_set.finished_job() @@ -81,8 +83,8 @@ def _change_resource(self, changes, factory_name, global_): if source_code is None: source_code = self.pymodule.source_code else: - self.pymodule = self.pycore.get_string_module( - source_code, resource=self.resource) + self.pymodule = libutils.get_string_module( + self.project, source_code, resource=self.resource) lines = self.pymodule.lines start = self._get_insertion_offset(class_scope, lines) result = source_code[:start] @@ -100,7 +102,7 @@ def _get_insertion_offset(self, class_scope, lines): def _get_factory_method(self, lines, class_scope, factory_name, global_): - unit_indents = ' ' * sourceutils.get_indent(self.pycore) + unit_indents = ' ' * sourceutils.get_indent(self.project) if global_: if self._get_scope_indents(lines, class_scope) > 0: raise rope.base.exceptions.RefactoringError( @@ -111,7 +113,7 @@ def _get_factory_method(self, lines, class_scope, ('@staticmethod\ndef %s(*args, **kwds):\n' % factory_name + '%sreturn %s(*args, **kwds)\n' % (unit_indents, self.old_name)) indents = self._get_scope_indents(lines, class_scope) + \ - sourceutils.get_indent(self.pycore) + sourceutils.get_indent(self.project) return '\n' + sourceutils.indent_lines(unindented_factory, indents) def _get_scope_indents(self, lines, scope): @@ -124,7 +126,7 @@ def _new_function_name(self, factory_name, global_): return self.old_name + '.' 
+ factory_name def _rename_occurrences(self, file_, changed_name, global_factory): - finder = occurrences.create_finder(self.pycore, self.old_name, + finder = occurrences.create_finder(self.project, self.old_name, self.old_pyname, only_calls=True) result = rename.rename_in_module(finder, changed_name, resource=file_, replace_primary=global_factory) diff --git a/pymode/libs2/rope/refactor/introduce_parameter.py b/pymode/libs2/rope/refactor/introduce_parameter.py index 312c61aa..43d6f755 100644 --- a/pymode/libs2/rope/refactor/introduce_parameter.py +++ b/pymode/libs2/rope/refactor/introduce_parameter.py @@ -35,10 +35,10 @@ def f(p=a.var): """ def __init__(self, project, resource, offset): - self.pycore = project.pycore + self.project = project self.resource = resource self.offset = offset - self.pymodule = self.pycore.resource_to_pyobject(self.resource) + self.pymodule = self.project.get_pymodule(self.resource) scope = self.pymodule.get_scope().get_inner_scope_for_offset(offset) if scope.get_kind() != 'Function': raise exceptions.RefactoringError( @@ -79,7 +79,7 @@ def _get_header_offsets(self): lines = self.pymodule.lines start_line = self.pyfunction.get_scope().get_start() end_line = self.pymodule.logical_lines.\ - logical_line_in(start_line)[1] + logical_line_in(start_line)[1] start = lines.get_line_start(start_line) end = lines.get_line_end(end_line) start = self.pymodule.source_code.find('def', start) + 4 @@ -88,7 +88,8 @@ def _get_header_offsets(self): def _change_function_occurances(self, collector, function_start, function_end, new_name): - finder = occurrences.create_finder(self.pycore, self.name, self.pyname) + finder = occurrences.create_finder(self.project, self.name, + self.pyname) for occurrence in finder.find_occurrences(resource=self.resource): start, end = occurrence.get_primary_range() if function_start <= start < function_end: diff --git a/pymode/libs2/rope/refactor/localtofield.py b/pymode/libs2/rope/refactor/localtofield.py index 
532d4c9e..f276070f 100644 --- a/pymode/libs2/rope/refactor/localtofield.py +++ b/pymode/libs2/rope/refactor/localtofield.py @@ -6,13 +6,12 @@ class LocalToField(object): def __init__(self, project, resource, offset): self.project = project - self.pycore = project.pycore self.resource = resource self.offset = offset def get_changes(self): name = worder.get_name_at(self.resource, self.offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) + this_pymodule = self.project.get_pymodule(self.resource) pyname = evaluate.eval_location(this_pymodule, self.offset) if not self._is_a_method_local(pyname): raise exceptions.RefactoringError( @@ -26,7 +25,7 @@ def get_changes(self): new_name = self._get_field_name(function_scope.pyobject, name) changes = Rename(self.project, self.resource, self.offset).\ - get_changes(new_name, resources=[self.resource]) + get_changes(new_name, resources=[self.resource]) return changes def _check_redefinition(self, name, function_scope): @@ -45,6 +44,6 @@ def _is_a_method_local(self, pyname): holding_scope = pymodule.get_scope().get_inner_scope_for_line(lineno) parent = holding_scope.parent return isinstance(pyname, pynames.AssignedName) and \ - pyname in holding_scope.get_names().values() and \ - holding_scope.get_kind() == 'Function' and \ - parent is not None and parent.get_kind() == 'Class' + pyname in holding_scope.get_names().values() and \ + holding_scope.get_kind() == 'Function' and \ + parent is not None and parent.get_kind() == 'Class' diff --git a/pymode/libs2/rope/refactor/method_object.py b/pymode/libs2/rope/refactor/method_object.py index b3dd6bdd..29ce429d 100644 --- a/pymode/libs2/rope/refactor/method_object.py +++ b/pymode/libs2/rope/refactor/method_object.py @@ -1,5 +1,6 @@ import warnings +from rope.base import libutils from rope.base import pyobjects, exceptions, change, evaluate, codeanalyze from rope.refactor import sourceutils, occurrences, rename @@ -7,8 +8,8 @@ class MethodObject(object): def 
__init__(self, project, resource, offset): - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) + self.project = project + this_pymodule = self.project.get_pymodule(resource) pyname = evaluate.eval_location(this_pymodule, offset) if pyname is None or not isinstance(pyname.get_object(), pyobjects.PyFunction): @@ -21,10 +22,10 @@ def __init__(self, project, resource, offset): def get_new_class(self, name): body = sourceutils.fix_indentation( - self._get_body(), sourceutils.get_indent(self.pycore) * 2) + self._get_body(), sourceutils.get_indent(self.project) * 2) return 'class %s(object):\n\n%s%sdef __call__(self):\n%s' % \ (name, self._get_init(), - ' ' * sourceutils.get_indent(self.pycore), body) + ' ' * sourceutils.get_indent(self.project), body) def get_changes(self, classname=None, new_class_name=None): if new_class_name is not None: @@ -36,14 +37,15 @@ def get_changes(self, classname=None, new_class_name=None): start, end = sourceutils.get_body_region(self.pyfunction) indents = sourceutils.get_indents( self.pymodule.lines, self.pyfunction.get_scope().get_start()) + \ - sourceutils.get_indent(self.pycore) + sourceutils.get_indent(self.project) new_contents = ' ' * indents + 'return %s(%s)()\n' % \ (classname, ', '.join(self._get_parameter_names())) collector.add_change(start, end, new_contents) insertion = self._get_class_insertion_point() collector.add_change(insertion, insertion, '\n\n' + self.get_new_class(classname)) - changes = change.ChangeSet('Replace method with method object refactoring') + changes = change.ChangeSet( + 'Replace method with method object refactoring') changes.add_change(change.ChangeContents(self.resource, collector.get_changed())) return changes @@ -59,9 +61,10 @@ def _get_body(self): body = sourceutils.get_body(self.pyfunction) for param in self._get_parameter_names(): body = param + ' = None\n' + body - pymod = self.pycore.get_string_module(body, self.resource) + pymod = 
libutils.get_string_module( + self.project, body, self.resource) pyname = pymod[param] - finder = occurrences.create_finder(self.pycore, param, pyname) + finder = occurrences.create_finder(self.project, param, pyname) result = rename.rename_in_module(finder, 'self.' + param, pymodule=pymod) body = result[result.index('\n') + 1:] @@ -69,7 +72,7 @@ def _get_body(self): def _get_init(self): params = self._get_parameter_names() - indents = ' ' * sourceutils.get_indent(self.pycore) + indents = ' ' * sourceutils.get_indent(self.project) if not params: return '' header = indents + 'def __init__(self' diff --git a/pymode/libs2/rope/refactor/move.py b/pymode/libs2/rope/refactor/move.py index c8761011..60df493e 100644 --- a/pymode/libs2/rope/refactor/move.py +++ b/pymode/libs2/rope/refactor/move.py @@ -4,9 +4,11 @@ based on inputs. """ -from rope.base import pyobjects, codeanalyze, exceptions, pynames, taskhandle, evaluate, worder +from rope.base import (pyobjects, codeanalyze, exceptions, pynames, + taskhandle, evaluate, worder, libutils) from rope.base.change import ChangeSet, ChangeContents, MoveResource -from rope.refactor import importutils, rename, occurrences, sourceutils, functionutils +from rope.refactor import importutils, rename, occurrences, sourceutils, \ + functionutils def create_move(project, resource, offset=None): @@ -18,7 +20,7 @@ def create_move(project, resource, offset=None): """ if offset is None: return MoveModule(project, resource) - this_pymodule = project.pycore.resource_to_pyobject(resource) + this_pymodule = project.get_pymodule(resource) pyname = evaluate.eval_location(this_pymodule, offset) if pyname is None: raise exceptions.RefactoringError( @@ -48,8 +50,7 @@ class MoveMethod(object): def __init__(self, project, resource, offset): self.project = project - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) + this_pymodule = self.project.get_pymodule(resource) pyname = evaluate.eval_location(this_pymodule, 
offset) self.method_name = worder.get_name_at(resource, offset) self.pyfunction = pyname.get_object() @@ -73,7 +74,7 @@ def get_changes(self, dest_attr, new_name=None, resources=None, """ changes = ChangeSet('Moving method <%s>' % self.method_name) if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() if new_name is None: new_name = self.get_method_name() resource1, start1, end1, new_content1 = \ @@ -89,11 +90,11 @@ def get_changes(self, dest_attr, new_name=None, resources=None, collector2 = codeanalyze.ChangeCollector(resource2.read()) collector2.add_change(start2, end2, new_content2) result = collector2.get_changed() - import_tools = importutils.ImportTools(self.pycore) + import_tools = importutils.ImportTools(self.project) new_imports = self._get_used_imports(import_tools) if new_imports: - goal_pymodule = self.pycore.get_string_module(result, - resource2) + goal_pymodule = libutils.get_string_module( + self.project, result, resource2) result = _add_imports_to_module( import_tools, goal_pymodule, new_imports) if resource2 in resources: @@ -108,13 +109,13 @@ def get_method_name(self): return self.method_name def _get_used_imports(self, import_tools): - return importutils.get_imports(self.pycore, self.pyfunction) + return importutils.get_imports(self.project, self.pyfunction) def _get_changes_made_by_old_class(self, dest_attr, new_name): pymodule = self.pyfunction.get_module() indents = self._get_scope_indents(self.pyfunction) - body = 'return self.%s.%s(%s)\n' % (dest_attr, new_name, - self._get_passed_arguments_string()) + body = 'return self.%s.%s(%s)\n' % ( + dest_attr, new_name, self._get_passed_arguments_string()) region = sourceutils.get_body_region(self.pyfunction) return (pymodule.get_resource(), region[0], region[1], sourceutils.fix_indentation(body, indents)) @@ -123,7 +124,7 @@ def _get_scope_indents(self, pyobject): pymodule = pyobject.get_module() return sourceutils.get_indents( 
pymodule.lines, pyobject.get_scope().get_start()) + \ - sourceutils.get_indent(self.pycore) + sourceutils.get_indent(self.project) def _get_changes_made_by_new_class(self, dest_attr, new_name): old_pyclass = self.pyfunction.parent @@ -150,7 +151,7 @@ def get_new_method(self, name): return '%s\n%s' % ( self._get_new_header(name), sourceutils.fix_indentation(self._get_body(), - sourceutils.get_indent(self.pycore))) + sourceutils.get_indent(self.project))) def _get_unchanged_body(self): return sourceutils.get_body(self.pyfunction) @@ -158,9 +159,9 @@ def _get_unchanged_body(self): def _get_body(self, host='host'): self_name = self._get_self_name() body = self_name + ' = None\n' + self._get_unchanged_body() - pymodule = self.pycore.get_string_module(body) + pymodule = libutils.get_string_module(self.project, body) finder = occurrences.create_finder( - self.pycore, self_name, pymodule[self_name]) + self.project, self_name, pymodule[self_name]) result = rename.rename_in_module(finder, host, pymodule=pymodule) if result is None: result = body @@ -199,26 +200,28 @@ class MoveGlobal(object): """For moving global function and classes""" def __init__(self, project, resource, offset): - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) + self.project = project + this_pymodule = self.project.get_pymodule(resource) self.old_pyname = evaluate.eval_location(this_pymodule, offset) self.old_name = self.old_pyname.get_object().get_name() pymodule = self.old_pyname.get_object().get_module() self.source = pymodule.get_resource() - self.tools = _MoveTools(self.pycore, self.source, + self.tools = _MoveTools(self.project, self.source, self.old_pyname, self.old_name) self.import_tools = self.tools.import_tools self._check_exceptional_conditions() def _check_exceptional_conditions(self): if self.old_pyname is None or \ - not isinstance(self.old_pyname.get_object(), pyobjects.PyDefinedObject): + not isinstance(self.old_pyname.get_object(), + 
pyobjects.PyDefinedObject): raise exceptions.RefactoringError( 'Move refactoring should be performed on a class/function.') moving_pyobject = self.old_pyname.get_object() if not self._is_global(moving_pyobject): raise exceptions.RefactoringError( - 'Move refactoring should be performed on a global class/function.') + 'Move refactoring should be performed ' + + 'on a global class/function.') def _is_global(self, pyobject): return pyobject.get_scope().parent == pyobject.get_module().get_scope() @@ -226,7 +229,7 @@ def _is_global(self, pyobject): def get_changes(self, dest, resources=None, task_handle=taskhandle.NullTaskHandle()): if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() if dest is None or not dest.exists(): raise exceptions.RefactoringError( 'Move destination does not exist.') @@ -251,7 +254,7 @@ def _calculate_changes(self, dest, resources, task_handle): elif file_ == dest: changes.add_change(self._dest_module_changes(dest)) elif self.tools.occurs_in_module(resource=file_): - pymodule = self.pycore.resource_to_pyobject(file_) + pymodule = self.project.get_pymodule(file_) # Changing occurrences placeholder = '__rope_renaming_%s_' % self.old_name source = self.tools.rename_in_module(placeholder, @@ -264,7 +267,8 @@ def _calculate_changes(self, dest, resources, task_handle): if should_import: pymodule = self.tools.new_pymodule(pymodule, source) source, imported = importutils.add_import( - self.pycore, pymodule, self._new_modname(dest), self.old_name) + self.project, pymodule, self._new_modname(dest), + self.old_name) source = source.replace(placeholder, imported) source = self.tools.new_source(pymodule, source) if source != file_.read(): @@ -276,25 +280,26 @@ def _source_module_changes(self, dest): placeholder = '__rope_moving_%s_' % self.old_name handle = _ChangeMoveOccurrencesHandle(placeholder) occurrence_finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname) + 
self.project, self.old_name, self.old_pyname) start, end = self._get_moving_region() renamer = ModuleSkipRenamer(occurrence_finder, self.source, handle, start, end) source = renamer.get_changed_module() if handle.occurred: - pymodule = self.pycore.get_string_module(source, self.source) + pymodule = libutils.get_string_module( + self.project, source, self.source) # Adding new import source, imported = importutils.add_import( - self.pycore, pymodule, self._new_modname(dest), self.old_name) + self.project, pymodule, self._new_modname(dest), self.old_name) source = source.replace(placeholder, imported) return ChangeContents(self.source, source) def _new_modname(self, dest): - return self.pycore.modname(dest) + return libutils.modname(dest) def _dest_module_changes(self, dest): # Changing occurrences - pymodule = self.pycore.resource_to_pyobject(dest) + pymodule = self.project.get_pymodule(dest) source = self.tools.rename_in_module(self.old_name, pymodule) pymodule = self.tools.new_pymodule(pymodule, source) @@ -310,7 +315,8 @@ def _dest_module_changes(self, dest): lineno = module_with_imports.imports[-1].end_line - 1 else: while lineno < pymodule.lines.length() and \ - pymodule.lines.get_line(lineno + 1).lstrip().startswith('#'): + pymodule.lines.get_line(lineno + 1).\ + lstrip().startswith('#'): lineno += 1 if lineno > 0: cut = pymodule.lines.get_line_end(lineno) + 1 @@ -320,17 +326,18 @@ def _dest_module_changes(self, dest): # Organizing imports source = result - pymodule = self.pycore.get_string_module(source, dest) + pymodule = libutils.get_string_module(self.project, source, dest) source = self.import_tools.organize_imports(pymodule, sort=False, unused=False) return ChangeContents(dest, source) def _get_moving_element_with_imports(self): return moving_code_with_imports( - self.pycore, self.source, self._get_moving_element()) + self.project, self.source, self._get_moving_element()) def _get_module_with_imports(self, source_code, resource): - pymodule = 
self.pycore.get_string_module(source_code, resource) + pymodule = libutils.get_string_module( + self.project, source_code, resource) return self.import_tools.module_imports(pymodule) def _get_moving_element(self): @@ -339,13 +346,13 @@ def _get_moving_element(self): return moving.rstrip() + '\n' def _get_moving_region(self): - pymodule = self.pycore.resource_to_pyobject(self.source) + pymodule = self.project.get_pymodule(self.source) lines = pymodule.lines scope = self.old_pyname.get_object().get_scope() start = lines.get_line_start(scope.get_start()) end_line = scope.get_end() while end_line < lines.length() and \ - lines.get_line(end_line + 1).strip() == '': + lines.get_line(end_line + 1).strip() == '': end_line += 1 end = min(lines.get_line_end(end_line) + 1, len(pymodule.source_code)) return start, end @@ -356,7 +363,8 @@ def _add_imports2(self, pymodule, new_imports): return pymodule, False else: resource = pymodule.get_resource() - pymodule = self.pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module( + self.project, source, resource) return pymodule, True @@ -365,13 +373,12 @@ class MoveModule(object): def __init__(self, project, resource): self.project = project - self.pycore = project.pycore if not resource.is_folder() and resource.name == '__init__.py': resource = resource.parent if resource.is_folder() and not resource.has_child('__init__.py'): raise exceptions.RefactoringError( 'Cannot move non-package folder.') - dummy_pymodule = self.pycore.get_string_module('') + dummy_pymodule = libutils.get_string_module(self.project, '') self.old_pyname = pynames.ImportedModule(dummy_pymodule, resource=resource) self.source = self.old_pyname.get_object().get_resource() @@ -379,15 +386,14 @@ def __init__(self, project, resource): self.old_name = self.source.name else: self.old_name = self.source.name[:-3] - self.tools = _MoveTools(self.pycore, self.source, + self.tools = _MoveTools(self.project, self.source, self.old_pyname, 
self.old_name) self.import_tools = self.tools.import_tools def get_changes(self, dest, resources=None, task_handle=taskhandle.NullTaskHandle()): - moving_pyobject = self.old_pyname.get_object() if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() if dest is None or not dest.is_folder(): raise exceptions.RefactoringError( 'Move destination for modules should be packages.') @@ -412,7 +418,7 @@ def _calculate_changes(self, dest, resources, task_handle): return changes def _new_modname(self, dest): - destname = self.pycore.modname(dest) + destname = libutils.modname(dest) if destname: return destname + '.' + self.old_name return self.old_name @@ -422,7 +428,7 @@ def _new_import(self, dest): def _change_moving_module(self, changes, dest): if not self.source.is_folder(): - pymodule = self.pycore.resource_to_pyobject(self.source) + pymodule = self.project.get_pymodule(self.source) source = self.import_tools.relatives_to_absolutes(pymodule) pymodule = self.tools.new_pymodule(pymodule, source) source = self._change_occurrences_in_module(dest, pymodule) @@ -436,11 +442,24 @@ def _change_occurrences_in_module(self, dest, pymodule=None, resource=resource): return if pymodule is None: - pymodule = self.pycore.resource_to_pyobject(resource) + pymodule = self.project.get_pymodule(resource) new_name = self._new_modname(dest) + module_imports = importutils.get_module_imports(self.project, pymodule) + changed = False + + source = None + if libutils.modname(dest): + changed = self._change_import_statements(dest, new_name, + module_imports) + if changed: + source = module_imports.get_changed_source() + source = self.tools.new_source(pymodule, source) + pymodule = self.tools.new_pymodule(pymodule, source) + new_import = self._new_import(dest) source = self.tools.rename_in_module( - new_name, imports=True, pymodule=pymodule, resource=resource) + new_name, imports=True, pymodule=pymodule, + resource=resource if not changed else 
None) should_import = self.tools.occurs_in_module( pymodule=pymodule, resource=resource, imports=False) pymodule = self.tools.new_pymodule(pymodule, source) @@ -449,8 +468,75 @@ def _change_occurrences_in_module(self, dest, pymodule=None, pymodule = self.tools.new_pymodule(pymodule, source) source = self.tools.add_imports(pymodule, [new_import]) source = self.tools.new_source(pymodule, source) - if source != pymodule.resource.read(): + if source is not None and source != pymodule.resource.read(): return source + return None + + + def _change_import_statements(self, dest, new_name, module_imports): + moving_module = self.source + parent_module = moving_module.parent + + changed = False + for import_stmt in module_imports.imports: + if not any(name_and_alias[0] == self.old_name + for name_and_alias in + import_stmt.import_info.names_and_aliases) and \ + not any(name_and_alias[0] == libutils.modname(self.source) + for name_and_alias in + import_stmt.import_info.names_and_aliases): + continue + + # Case 1: Look for normal imports of the moving module. + if isinstance(import_stmt.import_info, importutils.NormalImport): + continue + + # Case 2: The moving module is from-imported. + changed = self._handle_moving_in_from_import_stmt( + dest, import_stmt, module_imports, parent_module) or changed + + # Case 3: Names are imported from the moving module. 
+ context = importutils.importinfo.ImportContext(self.project, None) + if not import_stmt.import_info.is_empty() and \ + import_stmt.import_info.get_imported_resource(context) == \ + moving_module: + import_stmt.import_info = importutils.FromImport( + new_name, import_stmt.import_info.level, + import_stmt.import_info.names_and_aliases) + changed = True + + return changed + + def _handle_moving_in_from_import_stmt(self, dest, import_stmt, + module_imports, parent_module): + changed = False + context = importutils.importinfo.ImportContext(self.project, None) + if import_stmt.import_info.get_imported_resource(context) == \ + parent_module: + imports = import_stmt.import_info.names_and_aliases + new_imports = [] + for name, alias in imports: + # The moving module was imported. + if name == self.old_name: + changed = True + new_import = importutils.FromImport( + libutils.modname(dest), 0, + [(self.old_name, alias)]) + module_imports.add_import(new_import) + else: + new_imports.append((name, alias)) + + # Update the imports if the imported names were changed. 
+ if new_imports != imports: + changed = True + if new_imports: + import_stmt.import_info = importutils.FromImport( + import_stmt.import_info.module_name, + import_stmt.import_info.level, + new_imports) + else: + import_stmt.empty_import() + return changed class _ChangeMoveOccurrencesHandle(object): @@ -470,20 +556,22 @@ def occurred_outside_skip(self, change_collector, occurrence): class _MoveTools(object): - def __init__(self, pycore, source, pyname, old_name): - self.pycore = pycore + def __init__(self, project, source, pyname, old_name): + self.project = project self.source = source self.old_pyname = pyname self.old_name = old_name - self.import_tools = importutils.ImportTools(self.pycore) + self.import_tools = importutils.ImportTools(self.project) def remove_old_imports(self, pymodule): old_source = pymodule.source_code module_with_imports = self.import_tools.module_imports(pymodule) + class CanSelect(object): changed = False old_name = self.old_name old_pyname = self.old_pyname + def __call__(self, name): try: if name == self.old_name and \ @@ -501,7 +589,7 @@ def __call__(self, name): return new_source def rename_in_module(self, new_name, pymodule=None, - imports=False, resource=None): + imports=False, resource=None): occurrence_finder = self._create_finder(imports) source = rename.rename_in_module( occurrence_finder, new_name, replace_primary=True, @@ -516,13 +604,13 @@ def occurs_in_module(self, pymodule=None, resource=None, imports=True): return False def _create_finder(self, imports): - return occurrences.create_finder(self.pycore, self.old_name, + return occurrences.create_finder(self.project, self.old_name, self.old_pyname, imports=imports) def new_pymodule(self, pymodule, source): if source is not None: - return self.pycore.get_string_module( - source, pymodule.get_resource()) + return libutils.get_string_module( + self.project, source, pymodule.get_resource()) return pymodule def new_source(self, pymodule, source): @@ -541,10 +629,10 @@ def 
_add_imports_to_module(import_tools, pymodule, new_imports): return module_with_imports.get_changed_source() -def moving_code_with_imports(pycore, resource, source): - import_tools = importutils.ImportTools(pycore) - pymodule = pycore.get_string_module(source, resource) - origin = pycore.resource_to_pyobject(resource) +def moving_code_with_imports(project, resource, source): + import_tools = importutils.ImportTools(project) + pymodule = libutils.get_string_module(project, source, resource) + origin = project.get_pymodule(resource) imports = [] for stmt in import_tools.module_imports(origin).imports: @@ -557,12 +645,12 @@ def moving_code_with_imports(pycore, resource, source): imports.append(import_tools.get_from_import(resource, back_names)) source = _add_imports_to_module(import_tools, pymodule, imports) - pymodule = pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module(project, source, resource) source = import_tools.relatives_to_absolutes(pymodule) - pymodule = pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module(project, source, resource) source = import_tools.organize_imports(pymodule, selfs=False) - pymodule = pycore.get_string_module(source, resource) + pymodule = libutils.get_string_module(project, source, resource) # extracting imports after changes module_imports = import_tools.module_imports(pymodule) @@ -610,7 +698,7 @@ def __init__(self, occurrence_finder, resource, handle=None, self.replacement = replacement self.handle = handle if self.handle is None: - self.handle = ModuleSkipHandle() + self.handle = ModuleSkipRenamerHandle() def get_changed_module(self): source = self.resource.read() @@ -618,7 +706,8 @@ def get_changed_module(self): if self.replacement is not None: change_collector.add_change(self.skip_start, self.skip_end, self.replacement) - for occurrence in self.occurrence_finder.find_occurrences(self.resource): + for occurrence in self.occurrence_finder.find_occurrences( + 
self.resource): start, end = occurrence.get_primary_range() if self.skip_start <= start < self.skip_end: self.handle.occurred_inside_skip(change_collector, occurrence) diff --git a/pymode/libs2/rope/refactor/multiproject.py b/pymode/libs2/rope/refactor/multiproject.py index 6a85d2a2..ac243bda 100644 --- a/pymode/libs2/rope/refactor/multiproject.py +++ b/pymode/libs2/rope/refactor/multiproject.py @@ -1,11 +1,11 @@ """This module can be used for performing cross-project refactorings -See the "cross-project refactorings" section of ``docs/library.txt`` +See the "cross-project refactorings" section of ``docs/library.rst`` file. """ -from rope.base import resources, project, libutils +from rope.base import resources, libutils class MultiProjectRefactoring(object): @@ -33,7 +33,7 @@ def __init__(self, refactoring, other_projects, addpath, self.refactoring = refactoring self.projects = [project] + other_projects for other_project in other_projects: - for folder in self.project.pycore.get_source_folders(): + for folder in self.project.get_source_folders(): other_project.get_prefs().add('python_path', folder.real_path) self.refactorings = [] for other in self.projects: @@ -57,7 +57,7 @@ def _resources_for_args(self, project, args, kwds): newkwds = dict((name, self._change_project_resource(project, value)) for name, value in kwds.items()) return newargs, newkwds - + def _change_project_resource(self, project, obj): if isinstance(obj, resources.Resource) and \ obj.project != project: diff --git a/pymode/libs2/rope/refactor/occurrences.py b/pymode/libs2/rope/refactor/occurrences.py index 2808ed2c..14a2d7de 100644 --- a/pymode/libs2/rope/refactor/occurrences.py +++ b/pymode/libs2/rope/refactor/occurrences.py @@ -1,7 +1,46 @@ +"""Find occurrences of a name in a project. + +This module consists of a `Finder` that finds all occurrences of a name +in a project. The `Finder.find_occurrences()` method is a generator that +yields `Occurrence` instances for each occurrence of the name. 
To create +a `Finder` object, use the `create_finder()` function: + + finder = occurrences.create_finder(project, 'foo', pyname) + for occurrence in finder.find_occurrences(): + pass + +It's possible to filter the occurrences. They can be specified when +calling the `create_finder()` function. + + * `only_calls`: If True, return only those instances where the name is + a function that's being called. + + * `imports`: If False, don't return instances that are in import + statements. + + * `unsure`: If a prediate function, return instances where we don't + know what the name references. It also filters based on the + predicate function. + + * `docs`: If True, it will search for occurrences in regions normally + ignored. E.g., strings and comments. + + * `in_hierarchy`: If True, it will find occurrences if the name is in + the class's hierarchy. + + * `instance`: Used only when you want implicit interfaces to be + considered. +""" + import re -import rope.base.pynames -from rope.base import pynames, pyobjects, codeanalyze, evaluate, exceptions, utils, worder +from rope.base import codeanalyze +from rope.base import evaluate +from rope.base import exceptions +from rope.base import pynames +from rope.base import pyobjects +from rope.base import utils +from rope.base import worder class Finder(object): @@ -19,8 +58,8 @@ class Finder(object): """ - def __init__(self, pycore, name, filters=[lambda o: True], docs=False): - self.pycore = pycore + def __init__(self, project, name, filters=[lambda o: True], docs=False): + self.project = project self.name = name self.docs = docs self.filters = filters @@ -28,7 +67,7 @@ def __init__(self, pycore, name, filters=[lambda o: True], docs=False): def find_occurrences(self, resource=None, pymodule=None): """Generate `Occurrence` instances""" - tools = _OccurrenceToolsCreator(self.pycore, resource=resource, + tools = _OccurrenceToolsCreator(self.project, resource=resource, pymodule=pymodule, docs=self.docs) for offset in 
self._textual_finder.find_offsets(tools.source_code): occurrence = Occurrence(tools, offset) @@ -41,7 +80,7 @@ def find_occurrences(self, resource=None, pymodule=None): break -def create_finder(pycore, name, pyname, only_calls=False, imports=True, +def create_finder(project, name, pyname, only_calls=False, imports=True, unsure=None, docs=False, instance=None, in_hierarchy=False): """A factory for `Finder` @@ -50,25 +89,25 @@ def create_finder(pycore, name, pyname, only_calls=False, imports=True, considered. """ - pynames = set([pyname]) + pynames_ = set([pyname]) filters = [] if only_calls: filters.append(CallsFilter()) if not imports: filters.append(NoImportsFilter()) - if isinstance(instance, rope.base.pynames.ParameterName): + if isinstance(instance, pynames.ParameterName): for pyobject in instance.get_objects(): try: - pynames.add(pyobject[name]) + pynames_.add(pyobject[name]) except exceptions.AttributeNotFoundError: pass - for pyname in pynames: + for pyname in pynames_: filters.append(PyNameFilter(pyname)) if in_hierarchy: filters.append(InHierarchyFilter(pyname)) if unsure: filters.append(UnsureFilter(unsure)) - return Finder(pycore, name, filters=filters, docs=docs) + return Finder(project, name, filters=filters, docs=docs) class Occurrence(object): @@ -96,7 +135,8 @@ def get_pyname(self): @utils.saveit def get_primary_and_pyname(self): try: - return self.tools.name_finder.get_primary_and_pyname_at(self.offset) + return self.tools.name_finder.get_primary_and_pyname_at( + self.offset) except exceptions.BadIdentifierError: pass @@ -109,11 +149,13 @@ def is_called(self): return self.tools.word_finder.is_a_function_being_called(self.offset) def is_defined(self): - return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) + return self.tools.word_finder.is_a_class_or_function_name_in_header( + self.offset) def is_a_fixed_primary(self): - return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) or \ - 
self.tools.word_finder.is_a_name_after_from_import(self.offset) + return self.tools.word_finder.is_a_class_or_function_name_in_header( + self.offset) or \ + self.tools.word_finder.is_a_name_after_from_import(self.offset) def is_written(self): return self.tools.word_finder.is_assigned_here(self.offset) @@ -134,11 +176,14 @@ def same_pyname(expected, pyname): return False if expected == pyname: return True - if type(expected) not in (pynames.ImportedModule, pynames.ImportedName) and \ - type(pyname) not in (pynames.ImportedModule, pynames.ImportedName): + if type(expected) not in (pynames.ImportedModule, pynames.ImportedName) \ + and type(pyname) not in \ + (pynames.ImportedModule, pynames.ImportedName): return False - return expected.get_definition_location() == pyname.get_definition_location() and \ - expected.get_object() == pyname.get_object() + return expected.get_definition_location() == \ + pyname.get_definition_location() and \ + expected.get_object() == pyname.get_object() + def unsure_pyname(pyname, unbound=True): """Return `True` if we don't know what this name references""" @@ -151,7 +196,7 @@ def unsure_pyname(pyname, unbound=True): class PyNameFilter(object): - """For finding occurrences of a name""" + """For finding occurrences of a name.""" def __init__(self, pyname): self.pyname = pyname @@ -162,7 +207,7 @@ def __call__(self, occurrence): class InHierarchyFilter(object): - """For finding occurrences of a name""" + """Finds the occurrence if the name is in the class's hierarchy.""" def __init__(self, pyname, implementations_only=False): self.pyname = pyname @@ -203,6 +248,7 @@ def _get_root_classes(self, pyclass, name): class UnsureFilter(object): + """Occurrences where we don't knoow what the name references.""" def __init__(self, unsure): self.unsure = unsure @@ -213,6 +259,7 @@ def __call__(self, occurrence): class NoImportsFilter(object): + """Don't include import statements as occurrences.""" def __call__(self, occurrence): if 
occurrence.is_in_import_statement(): @@ -220,6 +267,7 @@ def __call__(self, occurrence): class CallsFilter(object): + """Filter out non-call occurrences.""" def __call__(self, occurrence): if not occurrence.is_called(): @@ -258,8 +306,10 @@ def _normal_search(self, source): try: found = source.index(self.name, current) current = found + len(self.name) - if (found == 0 or not self._is_id_char(source[found - 1])) and \ - (current == len(source) or not self._is_id_char(source[current])): + if (found == 0 or + not self._is_id_char(source[found - 1])) and \ + (current == len(source) or + not self._is_id_char(source[current])): yield found except ValueError: break @@ -282,7 +332,7 @@ def _get_source(self, resource, pymodule): def _get_occurrence_pattern(self, name): occurrence_pattern = _TextualFinder.any('occurrence', - ['\\b' + name + '\\b']) + ['\\b' + name + '\\b']) pattern = re.compile(occurrence_pattern + '|' + self.comment_pattern + '|' + self.string_pattern) return pattern @@ -294,8 +344,8 @@ def any(name, list_): class _OccurrenceToolsCreator(object): - def __init__(self, pycore, resource=None, pymodule=None, docs=False): - self.pycore = pycore + def __init__(self, project, resource=None, pymodule=None, docs=False): + self.project = project self.__resource = resource self.__pymodule = pymodule self.docs = docs @@ -331,4 +381,4 @@ def resource(self): def pymodule(self): if self.__pymodule is not None: return self.__pymodule - return self.pycore.resource_to_pyobject(self.resource) + return self.project.get_pymodule(self.resource) diff --git a/pymode/libs2/rope/refactor/patchedast.py b/pymode/libs2/rope/refactor/patchedast.py index 88fa4d85..28d36d5a 100644 --- a/pymode/libs2/rope/refactor/patchedast.py +++ b/pymode/libs2/rope/refactor/patchedast.py @@ -68,6 +68,7 @@ def __init__(self, source, children=False): Number = object() String = object() + semicolon_or_as_in_except = object() def __call__(self, node): method = getattr(self, '_' + node.__class__.__name__, 
None) @@ -111,6 +112,10 @@ def _handle(self, node, base_children, eat_parens=False, eat_spaces=False): elif child == '!=': # INFO: This has been added to handle deprecated ``<>`` region = self.source.consume_not_equal() + elif child == self.semicolon_or_as_in_except: + # INFO: This has been added to handle deprecated + # semicolon in except + region = self.source.consume_except_as_or_semicolon() else: region = self.source.consume(child) child = self.source[region[0]:region[1]] @@ -205,16 +210,17 @@ def _find_next_statement_start(self): for child in children: if isinstance(child, ast.stmt): return child.col_offset \ - + self.lines.get_line_start(child.lineno) + + self.lines.get_line_start(child.lineno) return len(self.source.source) - _operators = {'And': 'and', 'Or': 'or', 'Add': '+', 'Sub': '-', 'Mult': '*', - 'Div': '/', 'Mod': '%', 'Pow': '**', 'LShift': '<<', - 'RShift': '>>', 'BitOr': '|', 'BitAnd': '&', 'BitXor': '^', - 'FloorDiv': '//', 'Invert': '~', 'Not': 'not', 'UAdd': '+', - 'USub': '-', 'Eq': '==', 'NotEq': '!=', 'Lt': '<', - 'LtE': '<=', 'Gt': '>', 'GtE': '>=', 'Is': 'is', - 'IsNot': 'is not', 'In': 'in', 'NotIn': 'not in'} + _operators = {'And': 'and', 'Or': 'or', 'Add': '+', 'Sub': '-', + 'Mult': '*', 'Div': '/', 'Mod': '%', 'Pow': '**', + 'LShift': '<<', 'RShift': '>>', 'BitOr': '|', 'BitAnd': '&', + 'BitXor': '^', 'FloorDiv': '//', 'Invert': '~', + 'Not': 'not', 'UAdd': '+', 'USub': '-', 'Eq': '==', + 'NotEq': '!=', 'Lt': '<', 'LtE': '<=', 'Gt': '>', + 'GtE': '>=', 'Is': 'is', 'IsNot': 'is not', 'In': 'in', + 'NotIn': 'not in'} def _get_op(self, node): return self._operators[node.__class__.__name__].split(' ') @@ -351,7 +357,8 @@ def _ImportFrom(self, node): children = ['from'] if node.level: children.append('.' 
* node.level) - children.extend([node.module or '', # see comment at rope.base.ast.walk + # see comment at rope.base.ast.walk + children.extend([node.module or '', 'import']) children.extend(self._child_nodes(node.names, ',')) self._handle(node, children) @@ -380,7 +387,8 @@ def _FunctionDef(self, node): def _arguments(self, node): children = [] args = list(node.args) - defaults = [None] * (len(args) - len(node.defaults)) + list(node.defaults) + defaults = [None] * (len(args) - len(node.defaults)) + \ + list(node.defaults) for index, (arg, default) in enumerate(zip(args, defaults)): if index > 0: children.append(',') @@ -568,13 +576,16 @@ def _ExceptHandler(self, node): self._excepthandler(node) def _excepthandler(self, node): + # self._handle(node, [self.semicolon_or_as_in_except]) children = ['except'] if node.type: children.append(node.type) if node.name: - children.extend([',', node.name]) + children.append(self.semicolon_or_as_in_except) + children.append(node.name) children.append(':') children.extend(node.body) + self._handle(node, children) def _Tuple(self, node): @@ -663,6 +674,10 @@ def consume_not_equal(self): repattern = _Source._not_equals_pattern return self._consume_pattern(repattern) + def consume_except_as_or_semicolon(self): + repattern = re.compile(r'as|,') + return self._consume_pattern(repattern) + def _good_token(self, token, offset, start=None): """Checks whether consumed token is in comments""" if start is None: diff --git a/pymode/libs2/rope/refactor/rename.py b/pymode/libs2/rope/refactor/rename.py index 65e6e1d5..3f1f5b7e 100644 --- a/pymode/libs2/rope/refactor/rename.py +++ b/pymode/libs2/rope/refactor/rename.py @@ -1,8 +1,9 @@ import warnings -from rope.base import exceptions, pyobjects, pynames, taskhandle, evaluate, worder, codeanalyze +from rope.base import (exceptions, pyobjects, pynames, taskhandle, + evaluate, worder, codeanalyze, libutils) from rope.base.change import ChangeSet, ChangeContents, MoveResource -from rope.refactor 
import occurrences, sourceutils +from rope.refactor import occurrences class Rename(object): @@ -16,11 +17,10 @@ class Rename(object): def __init__(self, project, resource, offset=None): """If `offset` is None, the `resource` itself will be renamed""" self.project = project - self.pycore = project.pycore self.resource = resource if offset is not None: self.old_name = worder.get_name_at(self.resource, offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) + this_pymodule = self.project.get_pymodule(self.resource) self.old_instance, self.old_pyname = \ evaluate.eval_location2(this_pymodule, offset) if self.old_pyname is None: @@ -30,7 +30,7 @@ def __init__(self, project, resource, offset=None): else: if not resource.is_folder() and resource.name == '__init__.py': resource = resource.parent - dummy_pymodule = self.pycore.get_string_module('') + dummy_pymodule = libutils.get_string_module(self.project, '') self.old_instance = None self.old_pyname = pynames.ImportedModule(dummy_pymodule, resource=resource) @@ -70,6 +70,7 @@ def get_changes(self, new_name, in_file=None, in_hierarchy=False, warnings.warn( 'unsure parameter should be a function that returns ' 'True or False', DeprecationWarning, stacklevel=2) + def unsure_func(value=unsure): return value unsure = unsure_func @@ -82,14 +83,15 @@ def unsure_func(value=unsure): if _is_local(self.old_pyname): resources = [self.resource] if resources is None: - resources = self.pycore.get_python_files() + resources = self.project.get_python_files() changes = ChangeSet('Renaming <%s> to <%s>' % (self.old_name, new_name)) finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname, unsure=unsure, + self.project, self.old_name, self.old_pyname, unsure=unsure, docs=docs, instance=self.old_instance, in_hierarchy=in_hierarchy and self.is_method()) - job_set = task_handle.create_jobset('Collecting Changes', len(resources)) + job_set = task_handle.create_jobset('Collecting Changes', + 
len(resources)) for file_ in resources: job_set.started_job(file_.path) new_content = rename_in_module(finder, new_name, resource=file_) @@ -119,8 +121,8 @@ def _is_renaming_a_module(self): def is_method(self): pyname = self.old_pyname return isinstance(pyname, pynames.DefinedName) and \ - isinstance(pyname.get_object(), pyobjects.PyFunction) and \ - isinstance(pyname.get_object().parent, pyobjects.PyClass) + isinstance(pyname.get_object(), pyobjects.PyFunction) and \ + isinstance(pyname.get_object().parent, pyobjects.PyClass) def _rename_module(self, resource, new_name, changes): if not resource.is_folder(): @@ -147,11 +149,11 @@ class ChangeOccurrences(object): """ def __init__(self, project, resource, offset): - self.pycore = project.pycore + self.project = project self.resource = resource self.offset = offset self.old_name = worder.get_name_at(resource, offset) - self.pymodule = self.pycore.resource_to_pyobject(self.resource) + self.pymodule = project.get_pymodule(self.resource) self.old_pyname = evaluate.eval_location(self.pymodule, offset) def get_old_name(self): @@ -161,7 +163,7 @@ def get_old_name(self): def _get_scope_offset(self): lines = self.pymodule.lines scope = self.pymodule.get_scope().\ - get_inner_scope_for_line(lines.get_line_number(self.offset)) + get_inner_scope_for_line(lines.get_line_number(self.offset)) start = lines.get_line_start(scope.get_start()) end = lines.get_line_end(scope.get_end()) return start, end @@ -171,7 +173,7 @@ def get_changes(self, new_name, only_calls=False, reads=True, writes=True): (self.old_name, new_name)) scope_start, scope_end = self._get_scope_offset() finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname, + self.project, self.old_name, self.old_pyname, imports=False, only_calls=only_calls) new_contents = rename_in_module( finder, new_name, pymodule=self.pymodule, replace_primary=True, @@ -181,8 +183,9 @@ def get_changes(self, new_name, only_calls=False, reads=True, writes=True): return 
changes -def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None, - replace_primary=False, region=None, reads=True, writes=True): +def rename_in_module(occurrences_finder, new_name, resource=None, + pymodule=None, replace_primary=False, region=None, + reads=True, writes=True): """Returns the changed source or `None` if there is no changes""" if resource is not None: source_code = resource.read() @@ -203,6 +206,7 @@ def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None, change_collector.add_change(start, end, new_name) return change_collector.get_changed() + def _is_local(pyname): module, lineno = pyname.get_definition_location() if lineno is None: @@ -212,5 +216,5 @@ def _is_local(pyname): scope.get_kind() in ('Function', 'Class'): scope = scope.parent return scope.get_kind() == 'Function' and \ - pyname in scope.get_names().values() and \ - isinstance(pyname, pynames.AssignedName) + pyname in scope.get_names().values() and \ + isinstance(pyname, pynames.AssignedName) diff --git a/pymode/libs2/rope/refactor/restructure.py b/pymode/libs2/rope/refactor/restructure.py index 1573c2fe..98a11e3d 100644 --- a/pymode/libs2/rope/refactor/restructure.py +++ b/pymode/libs2/rope/refactor/restructure.py @@ -1,6 +1,7 @@ import warnings from rope.base import change, taskhandle, builtins, ast, codeanalyze +from rope.base import libutils from rope.refactor import patchedast, similarfinder, sourceutils from rope.refactor.importutils import module_imports @@ -52,7 +53,6 @@ class Restructure(object): from rope.contrib import generate args - pycore: type=rope.base.pycore.PyCore project: type=rope.base.project.Project Example #4:: @@ -79,7 +79,7 @@ def __init__(self, project, pattern, goal, args=None, See class pydoc for more info about the arguments. 
""" - self.pycore = project.pycore + self.project = project self.pattern = pattern self.goal = goal self.args = args @@ -132,13 +132,13 @@ def get_changes(self, checks=None, imports=None, resources=None, (self.pattern, self.goal)) if resources is not None: files = [resource for resource in resources - if self.pycore.is_python_file(resource)] + if libutils.is_python_file(self.project, resource)] else: - files = self.pycore.get_python_files() + files = self.project.get_python_files() job_set = task_handle.create_jobset('Collecting Changes', len(files)) for resource in files: job_set.started_job(resource.path) - pymodule = self.pycore.resource_to_pyobject(resource) + pymodule = self.project.get_pymodule(resource) finder = similarfinder.SimilarFinder(pymodule, wildcards=self.wildcards) matches = list(finder.get_matches(self.pattern, self.args)) @@ -161,16 +161,16 @@ def _add_imports(self, resource, source, imports): if not imports: return source import_infos = self._get_import_infos(resource, imports) - pymodule = self.pycore.get_string_module(source, resource) - imports = module_imports.ModuleImports(self.pycore, pymodule) + pymodule = libutils.get_string_module(self.project, source, resource) + imports = module_imports.ModuleImports(self.project, pymodule) for import_info in import_infos: imports.add_import(import_info) return imports.get_changed_source() def _get_import_infos(self, resource, imports): - pymodule = self.pycore.get_string_module('\n'.join(imports), - resource) - imports = module_imports.ModuleImports(self.pycore, pymodule) + pymodule = libutils.get_string_module( + self.project, '\n'.join(imports), resource) + imports = module_imports.ModuleImports(self.project, pymodule) return [imports.import_info for imports in imports.imports] @@ -183,7 +183,7 @@ def make_checks(self, string_checks): checks = {} for key, value in string_checks.items(): is_pyname = not key.endswith('.object') and \ - not key.endswith('.type') + not key.endswith('.type') evaluated = 
self._evaluate(value, is_pyname=is_pyname) if evaluated is not None: checks[key] = evaluated @@ -198,7 +198,7 @@ def get_attribute(self, name): return builtins.builtins[name] pyobject = _BuiltinsStub() else: - pyobject = self.pycore.get_module(attributes[0]) + pyobject = self.project.get_module(attributes[0]) for attribute in attributes[1:]: pyname = pyobject[attribute] if pyname is None: diff --git a/pymode/libs2/rope/refactor/similarfinder.py b/pymode/libs2/rope/refactor/similarfinder.py index fc71abfa..f1a7d42d 100644 --- a/pymode/libs2/rope/refactor/similarfinder.py +++ b/pymode/libs2/rope/refactor/similarfinder.py @@ -2,9 +2,11 @@ import re import rope.refactor.wildcards -from rope.base import codeanalyze, evaluate, exceptions, ast, builtins -from rope.refactor import (patchedast, sourceutils, occurrences, - wildcards, importutils) +from rope.base import libutils +from rope.base import codeanalyze, exceptions, ast, builtins +from rope.refactor import (patchedast, wildcards) + +from rope.refactor.patchedast import MismatchedTokenError class BadNameInCheckError(exceptions.RefactoringError): @@ -22,8 +24,12 @@ class SimilarFinder(object): def __init__(self, pymodule, wildcards=None): """Construct a SimilarFinder""" self.source = pymodule.source_code - self.raw_finder = RawSimilarFinder( - pymodule.source_code, pymodule.get_ast(), self._does_match) + try: + self.raw_finder = RawSimilarFinder( + pymodule.source_code, pymodule.get_ast(), self._does_match) + except MismatchedTokenError: + print "in file %s" % pymodule.resource.path + raise self.pymodule = pymodule if wildcards is None: self.wildcards = {} @@ -41,7 +47,7 @@ def get_matches(self, code, args={}, start=0, end=None): if 'skip' in args.get('', {}): resource, region = args['']['skip'] if resource == self.pymodule.get_resource(): - skip_region = region + skip_region = region return self.raw_finder.get_matches(code, start=start, end=end, skip=skip_region) @@ -97,7 +103,7 @@ def get_matches(self, code, 
start=0, end=None, skip=None): if start <= match_start and match_end <= end: if skip is not None and (skip[0] < match_end and skip[1] > match_start): - continue + continue yield match def _get_matched_asts(self, code): @@ -175,8 +181,8 @@ def __check_stmt_list(self, nodes): def _match_nodes(self, expected, node, mapping): if isinstance(expected, ast.Name): - if self.ropevar.is_var(expected.id): - return self._match_wildcard(expected, node, mapping) + if self.ropevar.is_var(expected.id): + return self._match_wildcard(expected, node, mapping) if not isinstance(expected, ast.AST): return expected == node if expected.__class__ != node.__class__: @@ -296,8 +302,8 @@ def substitute(self, mapping): def _get_pattern(cls): if cls._match_pattern is None: pattern = codeanalyze.get_comment_pattern() + '|' + \ - codeanalyze.get_string_pattern() + '|' + \ - r'(?P\$\{[^\s\$\}]*\})' + codeanalyze.get_string_pattern() + '|' + \ + r'(?P\$\{[^\s\$\}]*\})' cls._match_pattern = re.compile(pattern) return cls._match_pattern @@ -339,6 +345,7 @@ def _is_var(self, name): def make_pattern(code, variables): variables = set(variables) collector = codeanalyze.ChangeCollector(code) + def does_match(node, name): return isinstance(node, ast.Name) and node.id == name finder = RawSimilarFinder(code, does_match=does_match) @@ -352,11 +359,12 @@ def does_match(node, name): def _pydefined_to_str(pydefined): address = [] - if isinstance(pydefined, (builtins.BuiltinClass, builtins.BuiltinFunction)): + if isinstance(pydefined, + (builtins.BuiltinClass, builtins.BuiltinFunction)): return '__builtins__.' 
+ pydefined.get_name() else: while pydefined.parent is not None: address.insert(0, pydefined.get_name()) pydefined = pydefined.parent - module_name = pydefined.pycore.modname(pydefined.resource) + module_name = libutils.modname(pydefined.resource) return '.'.join(module_name.split('.') + address) diff --git a/pymode/libs2/rope/refactor/sourceutils.py b/pymode/libs2/rope/refactor/sourceutils.py index f64213db..9b842906 100644 --- a/pymode/libs2/rope/refactor/sourceutils.py +++ b/pymode/libs2/rope/refactor/sourceutils.py @@ -1,4 +1,4 @@ -from rope.base import ast, codeanalyze +from rope.base import codeanalyze def get_indents(lines, lineno): @@ -48,7 +48,7 @@ def add_methods(pymodule, class_scope, methods_sources): methods = '\n\n' + '\n\n'.join(methods_sources) indented_methods = fix_indentation( methods, get_indents(lines, class_scope.get_start()) + - get_indent(pymodule.pycore)) + get_indent(pymodule.pycore.project)) result = [] result.append(source_code[:insertion_offset]) result.append(indented_methods) @@ -58,7 +58,7 @@ def add_methods(pymodule, class_scope, methods_sources): def get_body(pyfunction): """Return unindented function body""" - scope = pyfunction.get_scope() + # FIXME scope = pyfunction.get_scope() pymodule = pyfunction.get_module() start, end = get_body_region(pyfunction) return fix_indentation(pymodule.source_code[start:end], 0) @@ -87,6 +87,5 @@ def get_body_region(defined): return start, end -def get_indent(pycore): - project = pycore.project +def get_indent(project): return project.prefs.get('indent_size', 4) diff --git a/pymode/libs2/rope/refactor/suites.py b/pymode/libs2/rope/refactor/suites.py index d955c819..4f9a8c71 100644 --- a/pymode/libs2/rope/refactor/suites.py +++ b/pymode/libs2/rope/refactor/suites.py @@ -14,6 +14,7 @@ def find_visible_for_suite(root, lines): line2 = find_visible_for_suite(root, lines[1:]) suite1 = root.find_suite(line1) suite2 = root.find_suite(line2) + def valid(suite): return suite is not None and not 
suite.ignored if valid(suite1) and not valid(suite2): diff --git a/pymode/libs2/rope/refactor/topackage.py b/pymode/libs2/rope/refactor/topackage.py index b7113979..f36a6d52 100644 --- a/pymode/libs2/rope/refactor/topackage.py +++ b/pymode/libs2/rope/refactor/topackage.py @@ -1,12 +1,12 @@ import rope.refactor.importutils -from rope.base.change import ChangeSet, ChangeContents, MoveResource, CreateFolder +from rope.base.change import ChangeSet, ChangeContents, MoveResource, \ + CreateFolder class ModuleToPackage(object): def __init__(self, project, resource): self.project = project - self.pycore = project.pycore self.resource = resource def get_changes(self): @@ -27,6 +27,6 @@ def get_changes(self): return changes def _transform_relatives_to_absolute(self, resource): - pymodule = self.pycore.resource_to_pyobject(resource) - import_tools = rope.refactor.importutils.ImportTools(self.pycore) + pymodule = self.project.get_pymodule(resource) + import_tools = rope.refactor.importutils.ImportTools(self.project) return import_tools.relatives_to_absolutes(pymodule) diff --git a/pymode/libs2/rope/refactor/usefunction.py b/pymode/libs2/rope/refactor/usefunction.py index b0621525..85896a98 100644 --- a/pymode/libs2/rope/refactor/usefunction.py +++ b/pymode/libs2/rope/refactor/usefunction.py @@ -1,6 +1,7 @@ from rope.base import (change, taskhandle, evaluate, exceptions, pyobjects, pynames, ast) -from rope.refactor import restructure, sourceutils, similarfinder, importutils +from rope.base import libutils +from rope.refactor import restructure, sourceutils, similarfinder class UseFunction(object): @@ -9,7 +10,7 @@ class UseFunction(object): def __init__(self, project, resource, offset): self.project = project self.offset = offset - this_pymodule = project.pycore.resource_to_pyobject(resource) + this_pymodule = project.get_pymodule(resource) pyname = evaluate.eval_location(this_pymodule, offset) if pyname is None: raise exceptions.RefactoringError('Unresolvable name selected') 
@@ -37,7 +38,7 @@ def _check_returns(self): def get_changes(self, resources=None, task_handle=taskhandle.NullTaskHandle()): if resources is None: - resources = self.project.pycore.get_python_files() + resources = self.project.get_python_files() changes = change.ChangeSet('Using function <%s>' % self.pyfunction.get_name()) if self.resource in resources: @@ -55,7 +56,6 @@ def get_function_name(self): return self.pyfunction.get_name() def _restructure(self, resources, task_handle, others=True): - body = self._get_body() pattern = self._make_pattern() goal = self._make_goal(import_=others) imports = None @@ -75,7 +75,7 @@ def _find_temps(self): return find_temps(self.project, self._get_body()) def _module_name(self): - return self.project.pycore.modname(self.resource) + return libutils.modname(self.resource) def _make_pattern(self): params = self.pyfunction.get_param_names() @@ -123,7 +123,7 @@ def _is_expression(self): def find_temps(project, code): code = 'def f():\n' + sourceutils.indent_lines(code, 4) - pymodule = project.pycore.get_string_module(code) + pymodule = libutils.get_string_module(project, code) result = [] function_scope = pymodule.get_scope().get_scopes()[0] for name, pyname in function_scope.get_names().items(): @@ -135,16 +135,19 @@ def find_temps(project, code): def _returns_last(node): return node.body and isinstance(node.body[-1], ast.Return) + def _yield_count(node): visitor = _ReturnOrYieldFinder() visitor.start_walking(node) return visitor.yields + def _return_count(node): visitor = _ReturnOrYieldFinder() visitor.start_walking(node) return visitor.returns + class _ReturnOrYieldFinder(object): def __init__(self): diff --git a/pymode/libs2/rope/refactor/wildcards.py b/pymode/libs2/rope/refactor/wildcards.py index 6c487a2a..90040c79 100644 --- a/pymode/libs2/rope/refactor/wildcards.py +++ b/pymode/libs2/rope/refactor/wildcards.py @@ -100,7 +100,7 @@ def __call__(self, pymodule, node): pyname = self._evaluate_node(pymodule, node) if pyname is None 
or self.expected is None: return self.unsure - if self._unsure_pyname(pyname, unbound=self.kind=='name'): + if self._unsure_pyname(pyname, unbound=self.kind == 'name'): return True if self.kind == 'name': return self._same_pyname(self.expected, pyname) @@ -161,13 +161,15 @@ def _evaluate(self, code): class _BuiltinsStub(object): def get_attribute(self, name): return builtins.builtins[name] + def __getitem__(self, name): return builtins.builtins[name] + def __contains__(self, name): return name in builtins.builtins pyobject = _BuiltinsStub() else: - pyobject = self.project.pycore.get_module(attributes[0]) + pyobject = self.project.get_module(attributes[0]) for attribute in attributes[1:]: pyname = pyobject[attribute] if pyname is None: diff --git a/pymode/libs3/rope/__init__.py b/pymode/libs3/rope/__init__.py index 451ebe3a..a936fe29 100644 --- a/pymode/libs3/rope/__init__.py +++ b/pymode/libs3/rope/__init__.py @@ -1,7 +1,7 @@ """rope, a python refactoring library""" INFO = __doc__ -VERSION = '0.9.4' +VERSION = '0.9.4-1' COPYRIGHT = """\ Copyright (C) 2006-2010 Ali Gholami Rudi Copyright (C) 2009-2010 Anton Gritsay diff --git a/pymode/libs3/rope/refactor/patchedast.py b/pymode/libs3/rope/refactor/patchedast.py index 034dac35..042b33dd 100644 --- a/pymode/libs3/rope/refactor/patchedast.py +++ b/pymode/libs3/rope/refactor/patchedast.py @@ -1,7 +1,6 @@ import collections import re import warnings -import sys from rope.base import ast, codeanalyze, exceptions @@ -564,19 +563,6 @@ def _TryExcept(self, node): children.extend(['else', ':']) children.extend(node.orelse) self._handle(node, children) - - def _Try(self, node): - children = ['try', ':'] - children.extend(node.body) - children.extend(node.handlers) - if node.orelse: - children.extend(['else', ':']) - children.extend(node.orelse) - if node.finalbody: - children.extend(['finally', ':']) - children.extend(node.finalbody) - - self._handle(node, children) def _ExceptHandler(self, node): self._excepthandler(node) @@ 
-618,15 +604,9 @@ def _While(self, node): self._handle(node, children) def _With(self, node): - children = [] - if (sys.version_info[1] < 3): - children = ['with', node.context_expr] - if node.optional_vars: - children.extend(['as', node.optional_vars]) - else: - children = ['with', node.items[0].context_expr] - if node.items[0].optional_vars: - children.extend(['as', node.items[0].optional_vars]) + children = ['with', node.context_expr] + if node.optional_vars: + children.extend(['as', node.optional_vars]) children.append(':') children.extend(node.body) self._handle(node, children) diff --git a/pymode/libs3/rope/refactor/suites.py b/pymode/libs3/rope/refactor/suites.py index 041c06a2..d955c819 100644 --- a/pymode/libs3/rope/refactor/suites.py +++ b/pymode/libs3/rope/refactor/suites.py @@ -128,15 +128,6 @@ def _TryExcept(self, node): if node.orelse: self.suites.append(Suite(node.orelse, node.lineno, self.suite)) - def _Try(self, node): - self.suites.append(Suite(node.body, node.lineno, self.suite)) - for handler in node.handlers: - self.suites.append(Suite(handler.body, node.lineno, self.suite)) - if node.orelse: - self.suites.append(Suite(node.orelse, node.lineno, self.suite)) - if node.finalbody: - self.suites.append(Suite(node.finalbody, node.lineno, self.suite)) - def _add_if_like_node(self, node): self.suites.append(Suite(node.body, node.lineno, self.suite)) if node.orelse: From 7cf4eb928dea1d347d0832cd12b30c0672c679b3 Mon Sep 17 00:00:00 2001 From: Tyler Fenby Date: Sun, 21 Dec 2014 17:40:35 -0500 Subject: [PATCH 139/428] Update AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index c09fe72d..ef6d0bbc 100644 --- a/AUTHORS +++ b/AUTHORS @@ -47,3 +47,4 @@ Contributors: * lee (loyalpartner); * nixon; * tramchamploo; +* Tyler Fenby (https://github.com/TFenby) From 74f5ad3bafaafbb9234e704c4644273f63986f5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20Martano?= Date: Sat, 27 Dec 2014 19:24:46 -0200 Subject: [PATCH 
140/428] typo --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 5cec03f9..33968b3e 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -435,7 +435,7 @@ use the current directory. The location of the `.ropeproject` folder may also be overridden if you wish to keep it outside of your project root. The rope library treats this folder as a -project resource, so the path will always be relative to your proejct root (a +project resource, so the path will always be relative to your project root (a leading '/' will be ignored). You may use `'..'` path segments to place the folder outside of your project root. *'g:pymode_rope_ropefolder'* From e8c0ed05061114c44a5d18b7a0fef5ccfd408a4e Mon Sep 17 00:00:00 2001 From: chuan92 Date: Mon, 12 Jan 2015 19:02:15 +0800 Subject: [PATCH 141/428] Fix #482, (PEP 263 Python Source Code Encodings) --- pymode/run.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pymode/run.py b/pymode/run.py index b5a2bfa1..b966fdfc 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -6,8 +6,7 @@ from .environment import env -encoding = re(r'#[^\w]+coding:\s+utf.*$') - +encoding = re(r'#.*coding[:=]\s*([-\w.]+)') def run_code(): """ Run python code in current buffer. 
@@ -18,9 +17,12 @@ def run_code(): errors, err = [], '' line1, line2 = env.var('a:line1'), env.var('a:line2') lines = __prepare_lines(line1, line2) - for ix in (0, 1): - if encoding.match(lines[ix]): - lines.pop(ix) + if encoding.match(lines[0]): + lines.pop(0) + if encoding.match(lines[0]): + lines.pop(0) + elif encoding.match(lines[1]): + lines.pop(1) context = dict( __name__='__main__', From 8b184ccc1ad312c49257831b6b1286df60f1096a Mon Sep 17 00:00:00 2001 From: Colin Deasy Date: Tue, 13 Jan 2015 16:56:22 +0000 Subject: [PATCH 142/428] Don't force indentation to be 0 for lines not starting with whitespace Some docstrings/code parts use line continuation with zero whitespace on the following line. In this case we should accept the previous lines indentation instead of forcing the indentation to be zero. --- autoload/pymode/folding.vim | 4 ---- 1 file changed, 4 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 93f18b09..d0e09597 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -133,10 +133,6 @@ fun! pymode#folding#expr(lnum) "{{{ endif endif - if indent == 0 - return 0 - endif - return '=' endfunction "}}} From da4d8b23a4dbeb8e08db7cb4580acee0bca9e6a2 Mon Sep 17 00:00:00 2001 From: Stefan Scherfke Date: Wed, 14 Jan 2015 11:05:24 +0100 Subject: [PATCH 143/428] =?UTF-8?q?Fix=20issue=20#519=20=E2=80=93=C2=A0fol?= =?UTF-8?q?d=20text=20truncated=20when=20relativenumber=20is=20set.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- autoload/pymode/folding.vim | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 93f18b09..2dcf0d68 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -23,7 +23,8 @@ fun! 
pymode#folding#text() " {{{ endif let line = getline(fs) - let nucolwidth = &fdc + &number * &numberwidth + let has_numbers = &number || &relativenumber + let nucolwidth = &fdc + has_numbers * &numberwidth let windowwidth = winwidth(0) - nucolwidth - 6 let foldedlinecount = v:foldend - v:foldstart From 9190132d4fb3f5758ea5a3840ec3c0789ec8ed1e Mon Sep 17 00:00:00 2001 From: Dimitrios Semitsoglou-Tsiapos Date: Tue, 17 Feb 2015 19:09:57 +0100 Subject: [PATCH 144/428] rope: correct refactoring function calls * (Temporarily) drops passing of `task_handle`. --- pymode/rope.py | 68 +++++++++++++++++++++----------------------------- 1 file changed, 28 insertions(+), 40 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index 159900bf..2b12bd43 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -505,14 +505,7 @@ def get_input_str(refactor, ctx): @staticmethod def get_changes(refactor, input_str, in_hierarchy=False): - """ Get changes. - - :return Changes: - - """ - progress = ProgressHandler('Calculate changes ...') - return refactor.get_changes( - input_str, task_handle=progress.handle, in_hierarchy = in_hierarchy) + raise NotImplementedError class RenameRefactoring(Refactoring): @@ -550,8 +543,26 @@ def get_input_str(self, refactor, ctx): return newname + @staticmethod + def get_changes(refactor, input_str, in_hierarchy): + """ Get changes. + + :return Changes: + + """ + return refactor.get_changes(input_str, in_hierarchy=in_hierarchy) + +class ExtractRefactoring(Refactoring): + @staticmethod + def get_changes(refactor, input_str, in_hierarchy): + """ Get changes. + + :return Changes: + + """ + return refactor.get_changes(input_str) #, global_=not in_hierarchy) -class ExtractMethodRefactoring(Refactoring): +class ExtractMethodRefactoring(ExtractRefactoring): """ Extract method. 
""" @@ -574,18 +585,8 @@ def get_refactor(ctx): return extract.ExtractMethod( ctx.project, ctx.resource, offset1, offset2) - @staticmethod - def get_changes(refactor, input_str): - """ Get changes. - :return Changes: - - """ - - return refactor.get_changes(input_str) - - -class ExtractVariableRefactoring(Refactoring): +class ExtractVariableRefactoring(ExtractRefactoring): """ Extract variable. """ @@ -608,16 +609,6 @@ def get_refactor(ctx): return extract.ExtractVariable( ctx.project, ctx.resource, offset1, offset2) - @staticmethod - def get_changes(refactor, input_str): - """ Get changes. - - :return Changes: - - """ - - return refactor.get_changes(input_str) - class InlineRefactoring(Refactoring): @@ -634,14 +625,13 @@ def get_refactor(ctx): return inline.create_inline(ctx.project, ctx.resource, offset) @staticmethod - def get_changes(refactor, input_str): + def get_changes(refactor, input_str, in_hierarchy): """ Get changes. :return Changes: """ - progress = ProgressHandler('Calculate changes ...') - return refactor.get_changes(task_handle=progress.handle) + return refactor.get_changes() class UseFunctionRefactoring(Refactoring): @@ -659,15 +649,13 @@ def get_refactor(ctx): return usefunction.UseFunction(ctx.project, ctx.resource, offset) @staticmethod - def get_changes(refactor, input_str): + def get_changes(refactor, input_str, in_hierarchy): """ Get changes. :return Changes: """ - progress = ProgressHandler('Calculate changes ...') - return refactor.get_changes( - resources=[refactor.resource], task_handle=progress.handle) + return refactor.get_changes() class ModuleToPackageRefactoring(Refactoring): @@ -684,7 +672,7 @@ def get_refactor(ctx): return ModuleToPackage(ctx.project, ctx.resource) @staticmethod - def get_changes(refactor, input_str): + def get_changes(refactor, input_str, in_hierarchy): """ Get changes. 
:return Changes: @@ -746,7 +734,7 @@ def get_refactor(ctx): return change_signature.ChangeSignature( ctx.project, ctx.resource, offset) - def get_changes(self, refactor, input_string): + def get_changes(self, refactor, input_string, in_hierarchy): """ Function description. :return Rope.changes: @@ -771,7 +759,7 @@ def get_changes(self, refactor, input_string): changers.append(change_signature.ArgumentReorderer( order, autodef='None')) - return refactor.get_changes(changers) + return refactor.get_changes(changers, in_hierarchy=in_hierarchy) class GenerateElementRefactoring(Refactoring): From 7ce8c76675ef577bc565e9d5494e721310690c78 Mon Sep 17 00:00:00 2001 From: Samir Benmendil Date: Sun, 22 Feb 2015 14:58:29 +0000 Subject: [PATCH 145/428] Don't skip a line when the first docstring contains text --- autoload/pymode/folding.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 0ae61dd4..e54ef1bf 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -18,7 +18,7 @@ fun! pymode#folding#text() " {{{ while getline(fs) !~ s:def_regex && getline(fs) !~ s:doc_begin_regex let fs = nextnonblank(fs + 1) endwhile - if getline(fs) =~ s:doc_begin_regex + if getline(fs) =~ s:doc_end_regex && getline(fs) =~ s:doc_begin_regex let fs = nextnonblank(fs + 1) endif let line = getline(fs) From e6c914481768139c8abaf2491b17ace5b7b37c1a Mon Sep 17 00:00:00 2001 From: Samir Benmendil Date: Mon, 23 Feb 2015 00:54:44 +0000 Subject: [PATCH 146/428] Don't fold single line def Simply checks whether the next line is more indented than the line matching def_regex and if it's not don't increase the fold level. --- autoload/pymode/folding.vim | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 0ae61dd4..7974112b 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -50,6 +50,10 @@ fun! 
pymode#folding#expr(lnum) "{{{ endif if line =~ s:def_regex + " single line def + if indent(a:lnum) >= indent(a:lnum+1) + return '=' + endif " Check if last decorator is before the last def let decorated = 0 let lnum = a:lnum - 1 From 86161aa9cd7099abd08ee68fc68420cea58c05d2 Mon Sep 17 00:00:00 2001 From: Dylan Semler Date: Sun, 8 Mar 2015 08:26:23 -0400 Subject: [PATCH 147/428] fix placement of pymode_options help description --- doc/pymode.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 33968b3e..36ce040e 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -98,10 +98,6 @@ Setup default python options *'g:pymode_options'* > let g:pymode_options = 1 -Setup max line length *'g:pymode_options_max_line_length'* -> - let g:pymode_options_max_line_length = 79 - If this option is set to 1, pymode will enable the following options for python buffers: > @@ -115,6 +111,10 @@ python buffers: > setlocal commentstring=#%s setlocal define=^\s*\\(def\\\\|class\\) +Setup max line length *'g:pymode_options_max_line_length'* +> + let g:pymode_options_max_line_length = 79 + Enable colorcolumn display at max_line_length *'g:pymode_options_colorcolumn'* > let g:pymode_options_colorcolumn = 1 From f4e8437457df2164f51eaa67ac4391425f48e322 Mon Sep 17 00:00:00 2001 From: "John L. Villalovos" Date: Wed, 1 Apr 2015 14:34:06 -0700 Subject: [PATCH 148/428] Use 'https:' instead of 'git:' in documentation For people behind a proxy server it is difficult to 'git clone' using 'git:'. While 'https:' is universally usable. 
--- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index b6f9bb69..b1da7774 100644 --- a/README.rst +++ b/README.rst @@ -62,7 +62,7 @@ Using pathogen (recommended) % cd ~/.vim % mkdir -p bundle && cd bundle - % git clone git://github.com/klen/python-mode.git + % git clone https://github.com/klen/python-mode.git - Enable `pathogen `_ in your ``~/.vimrc``: :: @@ -81,7 +81,7 @@ Manually -------- :: - % git clone git://github.com/klen/python-mode.git + % git clone https://github.com/klen/python-mode.git % cd python-mode % cp -R * ~/.vim From 0368708fd3b3dbb8e3e2618e71f5b015c8142578 Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Fri, 3 Apr 2015 11:33:29 -0700 Subject: [PATCH 149/428] Skip doctest regions when searching for open pairs --- autoload/pymode/indent.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode/indent.vim b/autoload/pymode/indent.vim index d8e9f148..efd41f29 100644 --- a/autoload/pymode/indent.vim +++ b/autoload/pymode/indent.vim @@ -110,7 +110,7 @@ function! s:SearchParensPair() " {{{ " Skip strings and comments and don't look too far let skip = "line('.') < " . (line - 50) . " ? dummy :" . \ 'synIDattr(synID(line("."), col("."), 0), "name") =~? ' . 
- \ '"string\\|comment"' + \ '"string\\|comment\\|doctest"' " Search for parentheses call cursor(line, col) From 1ff7c2febe0bb1d901b5d54a64306da741c0d50f Mon Sep 17 00:00:00 2001 From: Valerio Crini Date: Sun, 3 May 2015 21:10:14 +0200 Subject: [PATCH 150/428] removing misstype --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index ae2a5d27..6f5bd5fc 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -443,7 +443,7 @@ imported) from project *'g:pymode_rope_autoimport'* Load modules to autoimport by default *'g:pymode_rope_autoimport_modules'* > - let g:pymode_rope_autoimport_modules = ['os', 'shutil', 'datetime']) + let g:pymode_rope_autoimport_modules = ['os', 'shutil', 'datetime'] Offer to unresolved import object after completion. > From ba3ec252c23ba1503dc69b2446268d05bb0c362e Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Sun, 3 May 2015 21:38:11 -0700 Subject: [PATCH 151/428] Highlight comments inside class/function arg lists --- syntax/python.vim | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/syntax/python.vim b/syntax/python.vim index 4c218c04..8c941374 100644 --- a/syntax/python.vim +++ b/syntax/python.vim @@ -80,13 +80,13 @@ endif syn match pythonFunction "\%(\%(def\s\|@\)\s*\)\@<=\h\%(\w\|\.\)*" contained nextgroup=pythonVars syn region pythonVars start="(" skip=+\(".*"\|'.*'\)+ end=")" contained contains=pythonParameters transparent keepend syn match pythonParameters "[^,]*" contained contains=pythonParam skipwhite - syn match pythonParam "[^,]*" contained contains=pythonExtraOperator,pythonLambdaExpr,pythonBuiltinObj,pythonBuiltinType,pythonConstant,pythonString,pythonNumber,pythonBrackets,pythonSelf skipwhite + syn match pythonParam "[^,]*" contained contains=pythonExtraOperator,pythonLambdaExpr,pythonBuiltinObj,pythonBuiltinType,pythonConstant,pythonString,pythonNumber,pythonBrackets,pythonSelf,pythonComment skipwhite syn match pythonBrackets "{[(|)]}" 
contained skipwhite syn keyword pythonStatement class nextgroup=pythonClass skipwhite syn match pythonClass "\%(\%(class\s\)\s*\)\@<=\h\%(\w\|\.\)*" contained nextgroup=pythonClassVars syn region pythonClassVars start="(" end=")" contained contains=pythonClassParameters transparent keepend - syn match pythonClassParameters "[^,\*]*" contained contains=pythonBuiltin,pythonBuiltinObj,pythonBuiltinType,pythonExtraOperatorpythonStatement,pythonBrackets,pythonString skipwhite + syn match pythonClassParameters "[^,\*]*" contained contains=pythonBuiltin,pythonBuiltinObj,pythonBuiltinType,pythonExtraOperatorpythonStatement,pythonBrackets,pythonString,pythonComment skipwhite syn keyword pythonRepeat for while syn keyword pythonConditional if elif else From 956e3dbe4a3d767f08f258b94abe702669874245 Mon Sep 17 00:00:00 2001 From: Nate Zhang Date: Thu, 14 May 2015 04:13:44 +0800 Subject: [PATCH 152/428] Add Python documentation vertical display option --- autoload/pymode/doc.vim | 3 +++ plugin/pymode.vim | 3 +++ 2 files changed, 6 insertions(+) diff --git a/autoload/pymode/doc.vim b/autoload/pymode/doc.vim index d29d5e9e..b89eb0e7 100644 --- a/autoload/pymode/doc.vim +++ b/autoload/pymode/doc.vim @@ -29,6 +29,9 @@ fun! 
pymode#doc#show(word) "{{{ setlocal nomodifiable setlocal nomodified setlocal filetype=rst + if g:pymode_doc_vertical + wincmd L + endif wincmd p endfunction "}}} diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 9bd4d95c..53408152 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -52,6 +52,9 @@ call pymode#default("g:pymode_options", 1) call pymode#default("g:pymode_options_max_line_length", 80) call pymode#default("g:pymode_options_colorcolumn", 1) +" Enable/disable vertical display of python documentation +call pymode#default("g:pymode_doc_vertical", 0) + " Minimal height of pymode quickfix window call pymode#default('g:pymode_quickfix_maxheight', 6) From 113909f386855c2c769374e6f664d74c794f9742 Mon Sep 17 00:00:00 2001 From: Chris Drane Date: Thu, 21 May 2015 10:23:30 -0400 Subject: [PATCH 153/428] Removed ambiguity in doc/pymode.txt related to pymode_options defaults --- doc/pymode.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 33968b3e..36ce040e 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -98,10 +98,6 @@ Setup default python options *'g:pymode_options'* > let g:pymode_options = 1 -Setup max line length *'g:pymode_options_max_line_length'* -> - let g:pymode_options_max_line_length = 79 - If this option is set to 1, pymode will enable the following options for python buffers: > @@ -115,6 +111,10 @@ python buffers: > setlocal commentstring=#%s setlocal define=^\s*\\(def\\\\|class\\) +Setup max line length *'g:pymode_options_max_line_length'* +> + let g:pymode_options_max_line_length = 79 + Enable colorcolumn display at max_line_length *'g:pymode_options_colorcolumn'* > let g:pymode_options_colorcolumn = 1 From d8f04f943e9614e436bd9ef6f7b22f27c3bf79bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Korzeniewski?= Date: Sun, 31 May 2015 16:47:28 +0200 Subject: [PATCH 154/428] Add wdb to debugger list in breakpoint cmd. 
--- autoload/pymode/breakpoint.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index 18e1a95b..cf7b95be 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -17,7 +17,7 @@ fun! pymode#breakpoint#init() "{{{ from imp import find_module -for module in ('pudb', 'ipdb'): +for module in ('wdb', 'pudb', 'ipdb'): try: find_module(module) vim.command('let g:pymode_breakpoint_cmd = "import %s; %s.set_trace() # XXX BREAKPOINT"' % (module, module)) From 8144e994cabe27a8f169c1bf001c58ad0e998d20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Guido=20Perc=C3=BA?= Date: Wed, 1 Jul 2015 18:06:24 -0300 Subject: [PATCH 155/428] Documentation is on :help pymode --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index b6f9bb69..827dd95c 100644 --- a/README.rst +++ b/README.rst @@ -40,7 +40,7 @@ See (very old) screencast here: http://www.youtube.com/watch?v=67OZNp9Z0CQ (sorry for quality, this is my first screencast) Another old presentation here: http://www.youtube.com/watch?v=YhqsjUUHj6g -**To read python-mode documentation in Vim, see** ``:help pymode.txt`` +**To read python-mode documentation in Vim, see** ``:help pymode`` .. contents:: From 88969fb67e3c7c32c711954abdb86e94c9686fe4 Mon Sep 17 00:00:00 2001 From: Robin Schneider Date: Sun, 5 Jul 2015 22:59:16 +0200 Subject: [PATCH 156/428] g:pymode_trim_whitespaces = 0 was ignored. * Added condition to the trim function itself. --- autoload/pymode.vim | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index c518c415..8c4cdea6 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -76,9 +76,11 @@ endfunction "}}} " DESC: Remove unused whitespaces fun! pymode#trim_whitespaces() "{{{ - let cursor_pos = getpos('.') - silent! 
%s/\s\+$// - call setpos('.', cursor_pos) + if g:pymode_trim_whitespaces + let cursor_pos = getpos('.') + silent! %s/\s\+$// + call setpos('.', cursor_pos) + endif endfunction "}}} From 08ec591f46eb3e7ada39c1f79d0ffdbab091317d Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Tue, 25 Aug 2015 18:43:00 +0300 Subject: [PATCH 157/428] Update Rope --- AUTHORS | 2 +- Changelog.rst | 2 ++ Makefile | 6 ++++++ README.rst | 4 ++-- doc/pymode.txt | 8 ++++---- plugin/pymode.vim | 2 +- pylama.ini | 11 ++++++++--- pymode/libs2/rope/base/default_config.py | 5 +++-- pymode/libs3/rope/base/default_config.py | 6 +++--- pymode/rope.py | 18 ++++++------------ 10 files changed, 36 insertions(+), 28 deletions(-) diff --git a/AUTHORS b/AUTHORS index ef6d0bbc..99ed09ec 100644 --- a/AUTHORS +++ b/AUTHORS @@ -40,6 +40,7 @@ Contributors: * Sorin Ionescu (sorin-ionescu); * Steve Losh (http://github.com/sjl); * Tommy Allen (https://github.com/tweekmonster) +* Tyler Fenby (https://github.com/TFenby) * Wayne Ye (https://github.com/WayneYe) * bendavis78 (http://github.com/bendavis78) * fwuzju (http://github.com/fwuzju) @@ -47,4 +48,3 @@ Contributors: * lee (loyalpartner); * nixon; * tramchamploo; -* Tyler Fenby (https://github.com/TFenby) diff --git a/Changelog.rst b/Changelog.rst index ac48fccb..6e728832 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -2,12 +2,14 @@ Changelog ========= * Pylama updated to version 5.0.5 +* Rope libs updated * Add 'pymode_options_max_line_length' option * Add ability to set related checker options `:help pymode-lint-options` Options added: 'pymode_lint_options_pep8', 'pymode_lint_options_pep257', 'pymode_lint_options_mccabe', 'pymode_lint_options_pyflakes', 'pymode_lint_options_pylint' + ## 2014-06-11 0.8.1 ------------------- * Pylama updated to version 3.3.2 diff --git a/Makefile b/Makefile index a4370605..38c009c2 100644 --- a/Makefile +++ b/Makefile @@ -26,6 +26,12 @@ pylama: make $(PYLAMA) make $(PYLAMA)/lint/pylama_pylint +.PHONY: rope +rope: + @git 
clone https://github.com/python-rope/rope.git $(CURDIR)/_/rope + @rm -rf $(CURDIR)/pymode/libs/rope + @cp -r $(CURDIR)/_/rope/rope $(CURDIR)/pymode/libs/. + $(PYLAMA): cp -r ~/Dropbox/projects/pylama/pylama $(PYLAMA) diff --git a/README.rst b/README.rst index b6f9bb69..54b659ae 100644 --- a/README.rst +++ b/README.rst @@ -187,8 +187,8 @@ License Licensed under a `GNU lesser general public license`_. -If you like this plugin, you can send me postcard :) -My address is here: "Russia, 143401, Krasnogorsk, Shkolnaya 1-19" to "Kirill Klenov". +If you like this plugin, I would very appreciated if you kindly send me a postcard :) +My address is here: "Russia, 143500, MO, Istra, pos. Severny 8-3" to "Kirill Klenov". **Thanks for support!** .. _GNU lesser general public license: http://www.gnu.org/copyleft/lesser.html diff --git a/doc/pymode.txt b/doc/pymode.txt index 33968b3e..55e51a4f 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -482,7 +482,7 @@ Keymap for autocomplete *'g:pymode_rope_completion_bind'* Extended autocompletion (rope could complete objects which have not been imported) from project *'g:pymode_rope_autoimport'* > - let g:pymode_rope_autoimport = 1 + let g:pymode_rope_autoimport = 0 Load modules to autoimport by default *'g:pymode_rope_autoimport_modules'* > @@ -781,10 +781,10 @@ The sequence of commands that fixed this: Python-mode is released under the GNU lesser general public license. See: http://www.gnu.org/copyleft/lesser.html -If you like this plugin, you can send me a postcard :) +If you like this plugin, I would very appreciated if you kindly send me a postcard :) -My address is: "Russia, 143401, Krasnogorsk, Shkolnaya 1-19" to "Kirill -Klenov". Thanks for your support! +My address is: "Russia, 143500, MO, Istra, pos. Severny 8-3" to "Kirill Klenov". +Thanks for your support! 
------------------------------------------------------------------------------ diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 9bd4d95c..de93cf29 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -183,7 +183,7 @@ call pymode#default('g:pymode_rope_completion', 1) " Complete keywords from not imported modules (could make completion slower) " Enable autoimport used modules -call pymode#default('g:pymode_rope_autoimport', 1) +call pymode#default('g:pymode_rope_autoimport', 0) " Offer to import object after complete (if that not be imported before) call pymode#default('g:pymode_rope_autoimport_import_after_complete', 0) diff --git a/pylama.ini b/pylama.ini index 07c1ab7a..b8d3f375 100644 --- a/pylama.ini +++ b/pylama.ini @@ -1,3 +1,8 @@ -[main] -ignore = R0201,R0922,E1103 -skip = pymode/autopep8.py +[pylama] +linters=pep8,pyflakes,pylint + +[pylama:pymode/libs*] +skip=1 + +[pylama:pylint] +disable=E1120,E1130,E1103,W1401 diff --git a/pymode/libs2/rope/base/default_config.py b/pymode/libs2/rope/base/default_config.py index 0ee9937d..3745e306 100644 --- a/pymode/libs2/rope/base/default_config.py +++ b/pymode/libs2/rope/base/default_config.py @@ -13,8 +13,9 @@ def set_prefs(prefs): # '.svn': matches 'pkg/.svn' and all of its children # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' - prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject', - '.hg', '.svn', '_svn', '.git', '.tox'] + prefs['ignored_resources'] = [ + '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', '.git', + '.tox', '.env', 'node_modules', 'bower_components'] # Specifies which files should be considered python files. It is # useful when you have scripts inside your project. 
Only files diff --git a/pymode/libs3/rope/base/default_config.py b/pymode/libs3/rope/base/default_config.py index eda47b24..126cf7bf 100644 --- a/pymode/libs3/rope/base/default_config.py +++ b/pymode/libs3/rope/base/default_config.py @@ -13,9 +13,9 @@ def set_prefs(prefs): # '.svn': matches 'pkg/.svn' and all of its children # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' - prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject', - '.hg', '.svn', '_svn', '.git', - '__pycache__'] + prefs['ignored_resources'] = [ + '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', '.git', + '__pycache__', '.tox', '.env', 'node_modules', 'bower_components'] # Specifies which files should be considered python files. It is # useful when you have scripts inside your project. Only files diff --git a/pymode/rope.py b/pymode/rope.py index 159900bf..2347e49e 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -1,7 +1,6 @@ """ Rope support in pymode. """ from __future__ import absolute_import, print_function -import multiprocessing import os.path import re import site @@ -13,7 +12,6 @@ from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa from rope.refactor import ModuleToPackage, ImportOrganizer, rename, extract, inline, usefunction, move, change_signature, importutils # noqa -from ._compat import StringIO from .environment import env @@ -352,8 +350,7 @@ def __init__(self, path, project_path): """ Init Rope context. 
""" self.path = path - self.project = project.Project( - project_path, fscommands=FileSystemCommands()) + self.project = project.Project(project_path, fscommands=FileSystemCommands()) self.importer = rope_autoimport.AutoImport( project=self.project, observe=False) @@ -462,8 +459,8 @@ def run(self): action = env.user_input_choices( 'Choose what to do:', 'perform', 'preview', - 'perform in class hierarchy', - 'preview in class hierarchy') + 'perform in class hierarchy', + 'preview in class hierarchy') in_hierarchy = action.endswith("in class hierarchy") @@ -512,7 +509,7 @@ def get_changes(refactor, input_str, in_hierarchy=False): """ progress = ProgressHandler('Calculate changes ...') return refactor.get_changes( - input_str, task_handle=progress.handle, in_hierarchy = in_hierarchy) + input_str, task_handle=progress.handle, in_hierarchy=in_hierarchy) class RenameRefactoring(Refactoring): @@ -746,13 +743,12 @@ def get_refactor(ctx): return change_signature.ChangeSignature( ctx.project, ctx.resource, offset) - def get_changes(self, refactor, input_string): + def get_changes(self, refactor, input_string, in_hierarchy=False): """ Function description. :return Rope.changes: """ - args = re.sub(r'[\s\(\)]+', '', input_string).split(',') olds = [arg[0] for arg in refactor.get_args()] @@ -793,7 +789,7 @@ def get_refactor(self, ctx): return generate.create_generate( self.kind, ctx.project, ctx.resource, offset) - def get_changes(self, refactor, input_str): + def get_changes(self, refactor, input_str, in_hierarchy=False): """ Function description. 
:return Rope.changes: @@ -938,5 +934,3 @@ def _insert_import(name, module, ctx): progress = ProgressHandler('Apply changes ...') ctx.project.do(changes, task_handle=progress.handle) reload_changes(changes) - -# pylama:ignore=W1401,E1120,D From 59a300e99a1dfd03ec5f3919ef41fa42848f4676 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Wed, 26 Aug 2015 13:03:35 +0300 Subject: [PATCH 158/428] Update authors --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 5f97486f..aad20bd1 100644 --- a/AUTHORS +++ b/AUTHORS @@ -38,6 +38,7 @@ Contributors: * Piet Delport (http://github.com/pjdelport); * Robert David Grant (http://github.com/bgrant); * Ronald Andreu Kaiser (http://github.com/cathoderay); +* Samir Benmendil (https://github.com/Ram-Z) * Sorin Ionescu (sorin-ionescu); * Steve Losh (http://github.com/sjl); * Tommy Allen (https://github.com/tweekmonster) From fa6322e04c53916b6f947c7e33c359c3af2e281e Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Sun, 30 Aug 2015 18:26:54 -0700 Subject: [PATCH 159/428] Fix folding after blank line in class/def --- autoload/pymode/folding.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index c869b05f..3ed61bc5 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -51,7 +51,7 @@ fun! 
pymode#folding#expr(lnum) "{{{ if line =~ s:def_regex " single line def - if indent(a:lnum) >= indent(a:lnum+1) + if indent(a:lnum) >= indent(a:lnum+1) && getline(prevnonblank(a:lnum)) !~ ':\s*$' return '=' endif " Check if last decorator is before the last def From 294894abfd9925261f88f0b874e853e2fe362903 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 7 Sep 2015 08:54:54 +0200 Subject: [PATCH 160/428] Fix fold marker --- autoload/pymode.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index 8c4cdea6..1ce29c3f 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -109,7 +109,7 @@ endfunction "}}} fun! pymode#buffer_pre_write() "{{{ let b:pymode_modified = &modified -endfunction +endfunction "}}} fun! pymode#buffer_post_write() "{{{ if g:pymode_rope From f395f43197b159c18ba12c036bd0d6794ee87290 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 7 Sep 2015 08:55:33 +0200 Subject: [PATCH 161/428] Flip operands This avoids accessing the b:python_modified value, which under some circumstances cannot be set, which results in the following error: Error detected while processing function pymode#buffer_post_write: line 2: E121: Undefined variable: b:pymode_modified E15: Invalid expression: b:pymode_modified && g:pymode_rope_regenerate_on_write Note that this does not address the core issue with why pymode_modified if unset in the first place, but this avoids that being a problem if g:python_rope_regenerate_on_write is not wanted anyway. --- autoload/pymode.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode.vim b/autoload/pymode.vim index 1ce29c3f..6c5de80c 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -113,7 +113,7 @@ endfunction "}}} fun! 
pymode#buffer_post_write() "{{{ if g:pymode_rope - if b:pymode_modified && g:pymode_rope_regenerate_on_write + if g:pymode_rope_regenerate_on_write && b:pymode_modified call pymode#debug('regenerate') call pymode#rope#regenerate() endif From af4268183a9a005da6ea37874f720ae90e64ff3b Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 7 Sep 2015 20:57:45 +0300 Subject: [PATCH 162/428] Update pylama --- AUTHORS | 3 + Changelog.rst | 6 + Makefile | 4 +- pymode/autopep8.py | 773 ++-- pymode/environment.py | 52 +- pymode/libs/_markerlib/__init__.py | 16 + pymode/libs/_markerlib/markers.py | 119 + .../pylama_pylint => }/astroid/__init__.py | 15 +- .../pylama_pylint => }/astroid/__pkginfo__.py | 16 +- .../pylama_pylint => }/astroid/as_string.py | 85 +- pymode/libs/astroid/astpeephole.py | 86 + .../lint/pylama_pylint => }/astroid/bases.py | 94 +- .../libs/astroid/brain/builtin_inference.py | 245 ++ .../pylama_pylint => }/astroid/brain/py2gi.py | 52 +- .../astroid/brain/py2mechanize.py | 10 +- pymode/libs/astroid/brain/py2pytest.py | 31 + pymode/libs/astroid/brain/py2qt4.py | 22 + pymode/libs/astroid/brain/py2stdlib.py | 334 ++ pymode/libs/astroid/brain/pynose.py | 79 + pymode/libs/astroid/brain/pysix_moves.py | 261 ++ .../pylama_pylint => }/astroid/builder.py | 54 +- .../pylama_pylint => }/astroid/exceptions.py | 0 .../pylama_pylint => }/astroid/inference.py | 80 +- pymode/libs/astroid/inspector.py | 273 ++ .../pylama_pylint => }/astroid/manager.py | 127 +- .../lint/pylama_pylint => }/astroid/mixins.py | 20 +- pymode/libs/astroid/modutils.py | 670 ++++ .../astroid/node_classes.py | 116 +- .../lint/pylama_pylint => }/astroid/nodes.py | 1 + .../pylama_pylint => }/astroid/protocols.py | 117 +- .../astroid/raw_building.py | 35 +- .../pylama_pylint => }/astroid/rebuilder.py | 182 +- .../astroid/scoped_nodes.py | 526 ++- pymode/libs/astroid/test_utils.py | 218 ++ .../lint/pylama_pylint => }/astroid/utils.py | 29 +- pymode/libs/easy_install.py | 5 + 
.../logilab/common/__init__.py | 17 +- pymode/libs/logilab/common/cache.py | 114 + .../logilab/common/changelog.py | 6 +- pymode/libs/logilab/common/clcommands.py | 334 ++ pymode/libs/logilab/common/compat.py | 78 + .../logilab/common/configuration.py | 93 +- pymode/libs/logilab/common/daemon.py | 101 + pymode/libs/logilab/common/date.py | 335 ++ pymode/libs/logilab/common/debugger.py | 214 ++ .../logilab/common/decorators.py | 18 +- .../logilab/common/deprecation.py | 5 +- pymode/libs/logilab/common/fileutils.py | 404 +++ .../logilab/common/graph.py | 22 +- .../logilab/common/interface.py | 0 pymode/libs/logilab/common/logging_ext.py | 195 ++ .../logilab/common/modutils.py | 58 +- .../logilab/common/optik_ext.py | 17 +- pymode/libs/logilab/common/optparser.py | 92 + pymode/libs/logilab/common/proc.py | 277 ++ pymode/libs/logilab/common/pytest.py | 1202 +++++++ pymode/libs/logilab/common/registry.py | 1125 ++++++ pymode/libs/logilab/common/shellutils.py | 462 +++ pymode/libs/logilab/common/sphinx_ext.py | 87 + pymode/libs/logilab/common/sphinxutils.py | 122 + pymode/libs/logilab/common/table.py | 929 +++++ pymode/libs/logilab/common/tasksqueue.py | 101 + pymode/libs/logilab/common/testlib.py | 1338 +++++++ .../logilab/common/textutils.py | 7 +- .../pylama_pylint => }/logilab/common/tree.py | 0 pymode/libs/logilab/common/umessage.py | 194 + .../logilab/common/ureports/__init__.py | 18 +- .../logilab/common/ureports/docbook_writer.py | 3 +- .../logilab/common/ureports/html_writer.py | 70 +- .../logilab/common/ureports/nodes.py | 4 +- .../logilab/common/ureports/text_writer.py | 37 +- pymode/libs/logilab/common/urllib2ext.py | 89 + pymode/libs/logilab/common/vcgutils.py | 216 ++ .../logilab/common/visitor.py | 8 +- pymode/libs/logilab/common/xmlutils.py | 61 + .../libs/logilab_common-1.0.2-py2.7-nspkg.pth | 1 + .../DESCRIPTION.rst | 153 + .../logilab_common-1.0.2.dist-info/METADATA | 169 + .../logilab_common-1.0.2.dist-info/RECORD | 87 + 
.../libs/logilab_common-1.0.2.dist-info/WHEEL | 5 + .../metadata.json | 1 + .../namespace_packages.txt | 1 + .../top_level.txt | 1 + .../{pylama/lint/pylama_mccabe => }/mccabe.py | 78 +- pymode/libs/pep257.py | 1187 +++++++ .../{pylama/lint/pylama_pep8 => }/pep8.py | 273 +- pymode/libs/pkg_resources/__init__.py | 3113 +++++++++++++++++ .../_vendor}/__init__.py | 0 .../_vendor/packaging/__about__.py | 31 + .../_vendor/packaging/__init__.py | 24 + .../_vendor/packaging/_compat.py | 40 + .../_vendor/packaging/_structures.py | 78 + .../_vendor/packaging/specifiers.py | 784 +++++ .../_vendor/packaging/version.py | 403 +++ pymode/libs/pyflakes/__init__.py | 1 + pymode/libs/pyflakes/__main__.py | 5 + pymode/libs/pyflakes/api.py | 175 + .../pylama_pyflakes => }/pyflakes/checker.py | 69 +- .../pylama_pyflakes => }/pyflakes/messages.py | 15 +- pymode/libs/pyflakes/reporter.py | 81 + pymode/libs/pylama/__init__.py | 6 +- pymode/libs/pylama/__main__.py | 6 + pymode/libs/pylama/{tasks.py => async.py} | 52 +- pymode/libs/pylama/config.py | 54 +- pymode/libs/pylama/core.py | 60 +- pymode/libs/pylama/errors.py | 45 +- pymode/libs/pylama/hook.py | 20 +- pymode/libs/pylama/lint/__init__.py | 13 +- pymode/libs/pylama/lint/extensions.py | 37 +- pymode/libs/pylama/lint/pylama_mccabe.py | 29 + .../pylama/lint/pylama_mccabe/__init__.py | 20 - pymode/libs/pylama/lint/pylama_pep257.py | 21 + .../pylama/lint/pylama_pep257/__init__.py | 26 - .../libs/pylama/lint/pylama_pep257/pep257.py | 728 ---- .../__init__.py => pylama_pep8.py} | 28 +- pymode/libs/pylama/lint/pylama_pyflakes.py | 49 + .../pylama/lint/pylama_pyflakes/__init__.py | 65 - .../lint/pylama_pyflakes/pyflakes/__init__.py | 2 - .../pylama/lint/pylama_pylint/__init__.py | 9 +- .../pylama_pylint/astroid/brain/py2qt4.py | 25 - .../pylama_pylint/astroid/brain/py2stdlib.py | 252 -- .../logilab/common/__pkginfo__.py | 53 - .../pylama_pylint/logilab/common/compat.py | 243 -- pymode/libs/pylama/lint/pylama_pylint/main.py | 2 - 
.../pylama_pylint/pylint/checkers/stdlib.py | 69 - .../pylama_pylint/pylint/checkers/strings.py | 304 -- pymode/libs/pylama/main.py | 100 +- pymode/libs/pylama/pytest.py | 17 +- .../pylama_pylint => }/pylint/__init__.py | 2 + pymode/libs/pylint/__main__.py | 3 + .../pylama_pylint => }/pylint/__pkginfo__.py | 12 +- .../pylint/checkers/__init__.py | 43 +- .../pylint/checkers/base.py | 687 ++-- .../pylint/checkers/classes.py | 474 ++- .../pylint/checkers/design_analysis.py | 144 +- .../pylint/checkers/exceptions.py | 284 +- .../pylint/checkers/format.py | 221 +- .../pylint/checkers/imports.py | 81 +- .../pylint/checkers/logging.py | 99 +- .../pylint/checkers/misc.py | 50 +- .../pylint/checkers/newstyle.py | 49 +- pymode/libs/pylint/checkers/python3.py | 581 +++ .../pylint/checkers/raw_metrics.py | 0 .../pylint/checkers/similar.py | 55 +- pymode/libs/pylint/checkers/spelling.py | 250 ++ pymode/libs/pylint/checkers/stdlib.py | 216 ++ pymode/libs/pylint/checkers/strings.py | 615 ++++ .../pylint/checkers/typecheck.py | 270 +- .../pylint/checkers/utils.py | 192 +- .../pylint/checkers/variables.py | 474 ++- .../lint/pylama_pylint => }/pylint/config.py | 17 +- pymode/libs/pylint/epylint.py | 177 + pymode/libs/pylint/gui.py | 531 +++ .../pylama_pylint => }/pylint/interfaces.py | 16 +- .../lint/pylama_pylint => }/pylint/lint.py | 885 +++-- pymode/libs/pylint/pyreverse/__init__.py | 5 + pymode/libs/pylint/pyreverse/diadefslib.py | 233 ++ pymode/libs/pylint/pyreverse/diagrams.py | 247 ++ pymode/libs/pylint/pyreverse/main.py | 124 + pymode/libs/pylint/pyreverse/utils.py | 132 + pymode/libs/pylint/pyreverse/writer.py | 199 ++ .../pylint/reporters/__init__.py | 55 +- .../pylint/reporters/guireporter.py | 7 +- .../pylint/reporters/html.py | 47 +- pymode/libs/pylint/reporters/json.py | 58 + .../pylint/reporters/text.py | 37 +- pymode/libs/pylint/testutils.py | 412 +++ .../lint/pylama_pylint => }/pylint/utils.py | 588 ++-- pymode/libs/six.py | 838 +++++ pymode/lint.py | 16 +- 
pymode/rope.py | 2 +- pymode/run.py | 1 + pymode/utils.py | 11 +- 173 files changed, 27150 insertions(+), 4609 deletions(-) create mode 100644 pymode/libs/_markerlib/__init__.py create mode 100644 pymode/libs/_markerlib/markers.py rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/__init__.py (89%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/__pkginfo__.py (78%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/as_string.py (85%) create mode 100644 pymode/libs/astroid/astpeephole.py rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/bases.py (89%) create mode 100644 pymode/libs/astroid/brain/builtin_inference.py rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/brain/py2gi.py (75%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/brain/py2mechanize.py (53%) create mode 100644 pymode/libs/astroid/brain/py2pytest.py create mode 100644 pymode/libs/astroid/brain/py2qt4.py create mode 100644 pymode/libs/astroid/brain/py2stdlib.py create mode 100644 pymode/libs/astroid/brain/pynose.py create mode 100644 pymode/libs/astroid/brain/pysix_moves.py rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/builder.py (85%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/exceptions.py (100%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/inference.py (87%) create mode 100644 pymode/libs/astroid/inspector.py rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/manager.py (73%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/mixins.py (92%) create mode 100644 pymode/libs/astroid/modutils.py rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/node_classes.py (88%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/nodes.py (98%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/protocols.py (75%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/raw_building.py (92%) rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/rebuilder.py (87%) rename 
pymode/libs/{pylama/lint/pylama_pylint => }/astroid/scoped_nodes.py (67%) create mode 100644 pymode/libs/astroid/test_utils.py rename pymode/libs/{pylama/lint/pylama_pylint => }/astroid/utils.py (89%) create mode 100644 pymode/libs/easy_install.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/__init__.py (92%) create mode 100644 pymode/libs/logilab/common/cache.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/changelog.py (98%) create mode 100644 pymode/libs/logilab/common/clcommands.py create mode 100644 pymode/libs/logilab/common/compat.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/configuration.py (94%) create mode 100644 pymode/libs/logilab/common/daemon.py create mode 100644 pymode/libs/logilab/common/date.py create mode 100644 pymode/libs/logilab/common/debugger.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/decorators.py (95%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/deprecation.py (98%) create mode 100644 pymode/libs/logilab/common/fileutils.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/graph.py (93%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/interface.py (100%) create mode 100644 pymode/libs/logilab/common/logging_ext.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/modutils.py (94%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/optik_ext.py (96%) create mode 100644 pymode/libs/logilab/common/optparser.py create mode 100644 pymode/libs/logilab/common/proc.py create mode 100644 pymode/libs/logilab/common/pytest.py create mode 100644 pymode/libs/logilab/common/registry.py create mode 100644 pymode/libs/logilab/common/shellutils.py create mode 100644 pymode/libs/logilab/common/sphinx_ext.py create mode 100644 pymode/libs/logilab/common/sphinxutils.py create mode 100644 pymode/libs/logilab/common/table.py create mode 100644 pymode/libs/logilab/common/tasksqueue.py 
create mode 100644 pymode/libs/logilab/common/testlib.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/textutils.py (99%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/tree.py (100%) create mode 100644 pymode/libs/logilab/common/umessage.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/ureports/__init__.py (93%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/ureports/docbook_writer.py (99%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/ureports/html_writer.py (66%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/ureports/nodes.py (98%) rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/ureports/text_writer.py (82%) create mode 100644 pymode/libs/logilab/common/urllib2ext.py create mode 100644 pymode/libs/logilab/common/vcgutils.py rename pymode/libs/{pylama/lint/pylama_pylint => }/logilab/common/visitor.py (97%) create mode 100644 pymode/libs/logilab/common/xmlutils.py create mode 100644 pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth create mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/METADATA create mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/RECORD create mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/WHEEL create mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/metadata.json create mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt create mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt rename pymode/libs/{pylama/lint/pylama_mccabe => }/mccabe.py (86%) create mode 100644 pymode/libs/pep257.py rename pymode/libs/{pylama/lint/pylama_pep8 => }/pep8.py (89%) create mode 100644 pymode/libs/pkg_resources/__init__.py rename pymode/libs/{pylama/lint/pylama_pylint/logilab => pkg_resources/_vendor}/__init__.py (100%) create mode 100644 
pymode/libs/pkg_resources/_vendor/packaging/__about__.py create mode 100644 pymode/libs/pkg_resources/_vendor/packaging/__init__.py create mode 100644 pymode/libs/pkg_resources/_vendor/packaging/_compat.py create mode 100644 pymode/libs/pkg_resources/_vendor/packaging/_structures.py create mode 100644 pymode/libs/pkg_resources/_vendor/packaging/specifiers.py create mode 100644 pymode/libs/pkg_resources/_vendor/packaging/version.py create mode 100644 pymode/libs/pyflakes/__init__.py create mode 100644 pymode/libs/pyflakes/__main__.py create mode 100644 pymode/libs/pyflakes/api.py rename pymode/libs/{pylama/lint/pylama_pyflakes => }/pyflakes/checker.py (92%) rename pymode/libs/{pylama/lint/pylama_pyflakes => }/pyflakes/messages.py (94%) create mode 100644 pymode/libs/pyflakes/reporter.py create mode 100644 pymode/libs/pylama/__main__.py rename pymode/libs/pylama/{tasks.py => async.py} (55%) create mode 100644 pymode/libs/pylama/lint/pylama_mccabe.py delete mode 100644 pymode/libs/pylama/lint/pylama_mccabe/__init__.py create mode 100644 pymode/libs/pylama/lint/pylama_pep257.py delete mode 100644 pymode/libs/pylama/lint/pylama_pep257/__init__.py delete mode 100644 pymode/libs/pylama/lint/pylama_pep257/pep257.py rename pymode/libs/pylama/lint/{pylama_pep8/__init__.py => pylama_pep8.py} (65%) create mode 100644 pymode/libs/pylama/lint/pylama_pyflakes.py delete mode 100644 pymode/libs/pylama/lint/pylama_pyflakes/__init__.py delete mode 100644 pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2qt4.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2stdlib.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/logilab/common/compat.py delete mode 100644 pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py delete mode 100644 
pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/__init__.py (96%) create mode 100644 pymode/libs/pylint/__main__.py rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/__pkginfo__.py (90%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/__init__.py (75%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/base.py (64%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/classes.py (65%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/design_analysis.py (75%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/exceptions.py (52%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/format.py (84%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/imports.py (86%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/logging.py (69%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/misc.py (64%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/newstyle.py (76%) create mode 100644 pymode/libs/pylint/checkers/python3.py rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/raw_metrics.py (100%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/similar.py (91%) create mode 100644 pymode/libs/pylint/checkers/spelling.py create mode 100644 pymode/libs/pylint/checkers/stdlib.py create mode 100644 pymode/libs/pylint/checkers/strings.py rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/typecheck.py (65%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/utils.py (69%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/checkers/variables.py (60%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/config.py (91%) create mode 100644 pymode/libs/pylint/epylint.py create mode 100644 pymode/libs/pylint/gui.py rename pymode/libs/{pylama/lint/pylama_pylint => 
}/pylint/interfaces.py (76%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/lint.py (56%) create mode 100644 pymode/libs/pylint/pyreverse/__init__.py create mode 100644 pymode/libs/pylint/pyreverse/diadefslib.py create mode 100644 pymode/libs/pylint/pyreverse/diagrams.py create mode 100644 pymode/libs/pylint/pyreverse/main.py create mode 100644 pymode/libs/pylint/pyreverse/utils.py create mode 100644 pymode/libs/pylint/pyreverse/writer.py rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/reporters/__init__.py (76%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/reporters/guireporter.py (74%) rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/reporters/html.py (59%) create mode 100644 pymode/libs/pylint/reporters/json.py rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/reporters/text.py (84%) create mode 100644 pymode/libs/pylint/testutils.py rename pymode/libs/{pylama/lint/pylama_pylint => }/pylint/utils.py (63%) create mode 100644 pymode/libs/six.py diff --git a/AUTHORS b/AUTHORS index aad20bd1..4ffea0c1 100644 --- a/AUTHORS +++ b/AUTHORS @@ -33,10 +33,13 @@ Contributors: * Mel Boyce (http://github.com/syngin) * Mohammed (http://github.com/mbadran); * Naoya Inada (http://github.com/naoina); +* Nate Zhang (https://github.com/natezhang93) +* Paweł Korzeniewski (https://github.com/korzeniewskipl) * Pedro Algarvio (http://github.com/s0undt3ch); * Phillip Cloud (http://github.com/cpcloud); * Piet Delport (http://github.com/pjdelport); * Robert David Grant (http://github.com/bgrant); +* Robin Schneider (https://github.com/ypid) * Ronald Andreu Kaiser (http://github.com/cathoderay); * Samir Benmendil (https://github.com/Ram-Z) * Sorin Ionescu (sorin-ionescu); diff --git a/Changelog.rst b/Changelog.rst index 6e728832..e396eb69 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -3,11 +3,17 @@ Changelog * Pylama updated to version 5.0.5 * Rope libs updated +* Add wdb to debugger list in breakpoint cmd * Add 
'pymode_options_max_line_length' option * Add ability to set related checker options `:help pymode-lint-options` Options added: 'pymode_lint_options_pep8', 'pymode_lint_options_pep257', 'pymode_lint_options_mccabe', 'pymode_lint_options_pyflakes', 'pymode_lint_options_pylint' +* Highlight comments inside class/function arg lists +* Don't fold single line def +* Don't skip a line when the first docstring contains text +* Add Python documentation vertical display option +* Rope: correct refactoring function calls ## 2014-06-11 0.8.1 diff --git a/Makefile b/Makefile index 38c009c2..e27a8785 100644 --- a/Makefile +++ b/Makefile @@ -33,10 +33,10 @@ rope: @cp -r $(CURDIR)/_/rope/rope $(CURDIR)/pymode/libs/. $(PYLAMA): - cp -r ~/Dropbox/projects/pylama/pylama $(PYLAMA) + cp -r $$PRJDIR/pylama/pylama $(PYLAMA) $(PYLAMA)/lint/pylama_pylint: - cp -r ~/Dropbox/projects/pylama/plugins/pylama_pylint/pylama_pylint/ $(PYLAMA)/lint/pylama_pylint + cp -r $$PRJDIR/pylama/plugins/pylama_pylint/pylama_pylint/ $(PYLAMA)/lint/pylama_pylint $(CURDIR)/build: mkdir -p $(CURDIR)/build/usr/share/vim/addons diff --git a/pymode/autopep8.py b/pymode/autopep8.py index 5f3ccf0b..13308751 100644 --- a/pymode/autopep8.py +++ b/pymode/autopep8.py @@ -1,8 +1,8 @@ #!/usr/bin/env python -# + # Copyright (C) 2010-2011 Hideo Hattori # Copyright (C) 2011-2013 Hideo Hattori, Steven Myint -# Copyright (C) 2013-2014 Hideo Hattori, Steven Myint, Bill Wendling +# Copyright (C) 2013-2015 Hideo Hattori, Steven Myint, Bill Wendling # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -53,10 +53,11 @@ import re import signal import sys +import textwrap import token import tokenize -from pylama.lint.pylama_pep8 import pep8 +import pep8 try: @@ -65,7 +66,7 @@ unicode = str -__version__ = '1.0' +__version__ = '1.2.1a0' CR = '\r' @@ -93,6 +94,7 @@ # W602 is handled separately due to the need to avoid "with_traceback". 
CODE_TO_2TO3 = { + 'E231': ['ws_comma'], 'E721': ['idioms'], 'W601': ['has_key'], 'W603': ['ne'], @@ -100,7 +102,6 @@ 'W690': ['apply', 'except', 'exitfunc', - 'import', 'numliterals', 'operator', 'paren', @@ -113,6 +114,14 @@ 'xreadlines']} +if sys.platform == 'win32': # pragma: no cover + DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8') +else: + DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or + os.path.expanduser('~/.config'), 'pep8') +PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8') + + def open_with_encoding(filename, encoding=None, mode='r'): """Return opened file with a specific encoding.""" if not encoding: @@ -146,15 +155,13 @@ def readlines_from_file(filename): def extended_blank_lines(logical_line, blank_lines, + blank_before, indent_level, previous_logical): """Check for missing blank lines after class declaration.""" if previous_logical.startswith('class '): - if ( - logical_line.startswith(('def ', 'class ', '@')) or - pep8.DOCSTRING_REGEX.match(logical_line) - ): - if indent_level and not blank_lines: + if logical_line.startswith(('def ', 'class ', '@')): + if indent_level and not blank_lines and not blank_before: yield (0, 'E309 expected 1 blank line after class declaration') elif previous_logical.startswith('def '): if blank_lines and pep8.DOCSTRING_REGEX.match(logical_line): @@ -164,6 +171,7 @@ def extended_blank_lines(logical_line, if ( indent_level and not blank_lines and + not blank_before and logical_line.startswith(('def ')) and '(self' in logical_line ): @@ -171,7 +179,8 @@ def extended_blank_lines(logical_line, pep8.register_check(extended_blank_lines) -def continued_indentation(logical_line, tokens, indent_level, noqa): +def continued_indentation(logical_line, tokens, indent_level, indent_char, + noqa): """Override pep8's function to provide indentation information.""" first_row = tokens[0][2][0] nrows = 1 + tokens[-1][2][0] - first_row @@ -185,6 +194,11 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): 
indent_next = logical_line.endswith(':') row = depth = 0 + valid_hangs = ( + (DEFAULT_INDENT_SIZE,) + if indent_char != '\t' else (DEFAULT_INDENT_SIZE, + 2 * DEFAULT_INDENT_SIZE) + ) # Remember how many brackets were opened on each line. parens = [0] * nrows @@ -192,6 +206,11 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): # Relative indents of physical lines. rel_indent = [0] * nrows + # For each depth, collect a list of opening rows. + open_rows = [[0]] + # For each depth, memorize the hanging indentation. + hangs = [None] + # Visual indents. indent_chances = {} last_indent = tokens[0][2] @@ -217,17 +236,18 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): # Record the initial indent. rel_indent[row] = pep8.expand_indent(line) - indent_level - if depth: - # A bracket expression in a continuation line. - # Find the line that it was opened on. - for open_row in range(row - 1, -1, -1): - if parens[open_row]: - break - else: - # An unbracketed continuation line (ie, backslash). - open_row = 0 - hang = rel_indent[row] - rel_indent[open_row] + # Identify closing bracket. close_bracket = (token_type == tokenize.OP and text in ']})') + + # Is the indent relative to an opening bracket line? + for open_row in reversed(open_rows[depth]): + hang = rel_indent[row] - rel_indent[open_row] + hanging_indent = hang in valid_hangs + if hanging_indent: + break + if hangs[depth]: + hanging_indent = (hang == hangs[depth]) + visual_indent = (not close_bracket and hang > 0 and indent_chances.get(start[1])) @@ -237,23 +257,23 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): yield (start, 'E124 {0}'.format(indent[depth])) elif close_bracket and not hang: pass - elif visual_indent is True: - # Visual indent is verified. - if not indent[depth]: - indent[depth] = start[1] - elif visual_indent in (text, unicode): - # Ignore token lined up with matching one from a previous line. 
- pass elif indent[depth] and start[1] < indent[depth]: # Visual indent is broken. yield (start, 'E128 {0}'.format(indent[depth])) - elif (hang == DEFAULT_INDENT_SIZE or + elif (hanging_indent or (indent_next and rel_indent[row] == 2 * DEFAULT_INDENT_SIZE)): # Hanging indent is verified. if close_bracket: yield (start, 'E123 {0}'.format(indent_level + rel_indent[open_row])) + hangs[depth] = hang + elif visual_indent is True: + # Visual indent is verified. + indent[depth] = start[1] + elif visual_indent in (text, unicode): + # Ignore token lined up with matching one from a previous line. + pass else: one_indented = (indent_level + rel_indent[open_row] + DEFAULT_INDENT_SIZE) @@ -262,16 +282,20 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): error = ('E122', one_indented) elif indent[depth]: error = ('E127', indent[depth]) - elif hang % DEFAULT_INDENT_SIZE: - error = ('E121', one_indented) - else: + elif hang > DEFAULT_INDENT_SIZE: error = ('E126', one_indented) + else: + hangs[depth] = hang + error = ('E121', one_indented) yield (start, '{0} {1}'.format(*error)) # Look for visual indenting. - if (parens[row] and token_type not in (tokenize.NL, tokenize.COMMENT) - and not indent[depth]): + if ( + parens[row] and + token_type not in (tokenize.NL, tokenize.COMMENT) and + not indent[depth] + ): indent[depth] = start[1] indent_chances[start[1]] = True # Deal with implicit string concatenation. @@ -282,29 +306,36 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): # 4. elif not indent_chances and not row and not depth and text == 'if': indent_chances[end[1] + 1] = True + elif text == ':' and line[end[1]:].isspace(): + open_rows[depth].append(row) # Keep track of bracket depth. 
if token_type == tokenize.OP: if text in '([{': depth += 1 indent.append(0) + hangs.append(None) + if len(open_rows) == depth: + open_rows.append([]) + open_rows[depth].append(row) parens[row] += 1 elif text in ')]}' and depth > 0: # Parent indents should not be more than this one. prev_indent = indent.pop() or last_indent[1] + hangs.pop() for d in range(depth): if indent[d] > prev_indent: indent[d] = 0 for ind in list(indent_chances): if ind >= prev_indent: del indent_chances[ind] + del open_rows[depth + 1:] depth -= 1 if depth: indent_chances[indent[depth]] = True for idx in range(row, -1, -1): if parens[idx]: parens[idx] -= 1 - rel_indent[row] = rel_indent[idx] break assert len(indent) == depth + 1 if ( @@ -316,6 +347,9 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): indent_chances[start[1]] = text last_token_multiline = (start[0] != end[0]) + if last_token_multiline: + rel_indent[end[0] - first_row] = rel_indent[row] + last_line = line if ( @@ -323,8 +357,9 @@ def continued_indentation(logical_line, tokens, indent_level, noqa): not last_line_begins_with_multiline and pep8.expand_indent(line) == indent_level + DEFAULT_INDENT_SIZE ): - yield (last_indent, 'E125 {0}'.format(indent_level + - 2 * DEFAULT_INDENT_SIZE)) + pos = (start[0], indent[0] + 4) + yield (pos, 'E125 {0}'.format(indent_level + + 2 * DEFAULT_INDENT_SIZE)) del pep8._checks['logical_line'][pep8.continued_indentation] pep8.register_check(continued_indentation) @@ -382,7 +417,10 @@ def __init__(self, filename, set() if long_line_ignore_cache is None else long_line_ignore_cache) - # method definition + # Many fixers are the same even though pep8 categorizes them + # differently. 
+ self.fix_e115 = self.fix_e112 + self.fix_e116 = self.fix_e113 self.fix_e121 = self._fix_reindent self.fix_e122 = self._fix_reindent self.fix_e123 = self._fix_reindent @@ -412,8 +450,7 @@ def __init__(self, filename, options and (options.aggressive >= 2 or options.experimental) else self.fix_long_line_physically) self.fix_e703 = self.fix_e702 - - self._ws_comma_done = False + self.fix_w293 = self.fix_w291 def _fix_source(self, results): try: @@ -501,15 +538,20 @@ def fix(self): n=len(results), progress=progress), file=sys.stderr) if self.options.line_range: - results = [ - r for r in results - if self.options.line_range[0] <= r['line'] <= - self.options.line_range[1]] + start, end = self.options.line_range + results = [r for r in results + if start <= r['line'] <= end] self._fix_source(filter_results(source=''.join(self.source), results=results, - aggressive=self.options.aggressive, - indent_size=self.options.indent_size)) + aggressive=self.options.aggressive)) + + if self.options.line_range: + # If number of lines has changed then change line_range. + count = sum(sline.count('\n') + for sline in self.source[start - 1:end]) + self.options.line_range[1] = start + count - 1 + return ''.join(self.source) def _fix_reindent(self, result): @@ -524,6 +566,31 @@ def _fix_reindent(self, result): self.source[line_index] = ' ' * num_indent_spaces + target.lstrip() + def fix_e112(self, result): + """Fix under-indented comments.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + if not target.lstrip().startswith('#'): + # Don't screw with invalid syntax. + return [] + + self.source[line_index] = self.indent_word + target + + def fix_e113(self, result): + """Fix over-indented comments.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + indent = _get_indentation(target) + stripped = target.lstrip() + + if not stripped.startswith('#'): + # Don't screw with invalid syntax. 
+ return [] + + self.source[line_index] = indent[1:] + stripped + def fix_e125(self, result): """Fix indentation undistinguish from the next logical line.""" num_indent_spaces = int(result['info'].split()[1]) @@ -582,17 +649,6 @@ def fix_e225(self, result): def fix_e231(self, result): """Add missing whitespace.""" - # Optimize for comma case. This will fix all commas in the full source - # code in one pass. Don't do this more than once. If it fails the first - # time, there is no point in trying again. - if ',' in result['info'] and not self._ws_comma_done: - self._ws_comma_done = True - original = ''.join(self.source) - new = refactor(original, ['ws_comma']) - if original.strip() != new.strip(): - self.source = [new] - return range(1, 1 + len(original)) - line_index = result['line'] - 1 target = self.source[line_index] offset = result['column'] @@ -795,8 +851,8 @@ def fix_long_line(self, target, previous_line, def fix_e502(self, result): """Remove extraneous escape of newline.""" - line_index = result['line'] - 1 - target = self.source[line_index] + (line_index, _, target) = get_index_offset_contents(result, + self.source) self.source[line_index] = target.rstrip('\n\r \t\\') + '\n' def fix_e701(self, result): @@ -835,14 +891,21 @@ def fix_e702(self, result, logical): second = (_get_indentation(logical_lines[0]) + target[offset:].lstrip(';').lstrip()) - self.source[line_index] = first + '\n' + second + # find inline commnet + inline_comment = None + if '# ' == target[offset:].lstrip(';').lstrip()[:2]: + inline_comment = target[offset:].lstrip(';') + + if inline_comment: + self.source[line_index] = first + inline_comment + else: + self.source[line_index] = first + '\n' + second return [line_index + 1] def fix_e711(self, result): """Fix comparison with None.""" - line_index = result['line'] - 1 - target = self.source[line_index] - offset = result['column'] - 1 + (line_index, offset, target) = get_index_offset_contents(result, + self.source) right_offset = offset + 2 
if right_offset >= len(target): @@ -865,17 +928,16 @@ def fix_e711(self, result): self.source[line_index] = ' '.join([left, new_center, right]) def fix_e712(self, result): - """Fix comparison with boolean.""" - line_index = result['line'] - 1 - target = self.source[line_index] - offset = result['column'] - 1 + """Fix (trivial case of) comparison with boolean.""" + (line_index, offset, target) = get_index_offset_contents(result, + self.source) # Handle very easy "not" special cases. - if re.match(r'^\s*if \w+ == False:$', target): - self.source[line_index] = re.sub(r'if (\w+) == False:', + if re.match(r'^\s*if [\w.]+ == False:$', target): + self.source[line_index] = re.sub(r'if ([\w.]+) == False:', r'if not \1:', target, count=1) - elif re.match(r'^\s*if \w+ != True:$', target): - self.source[line_index] = re.sub(r'if (\w+) != True:', + elif re.match(r'^\s*if [\w.]+ != True:$', target): + self.source[line_index] = re.sub(r'if ([\w.]+) != True:', r'if not \1:', target, count=1) else: right_offset = offset + 2 @@ -903,15 +965,55 @@ def fix_e712(self, result): self.source[line_index] = left + new_right + def fix_e713(self, result): + """Fix (trivial case of) non-membership check.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + + # Handle very easy case only. 
+ if re.match(r'^\s*if not [\w.]+ in [\w.]+:$', target): + self.source[line_index] = re.sub(r'if not ([\w.]+) in ([\w.]+):', + r'if \1 not in \2:', + target, + count=1) + def fix_w291(self, result): """Remove trailing whitespace.""" fixed_line = self.source[result['line'] - 1].rstrip() self.source[result['line'] - 1] = fixed_line + '\n' + def fix_w391(self, _): + """Remove trailing blank lines.""" + blank_count = 0 + for line in reversed(self.source): + line = line.rstrip() + if line: + break + else: + blank_count += 1 + + original_length = len(self.source) + self.source = self.source[:original_length - blank_count] + return range(1, 1 + original_length) + + +def get_index_offset_contents(result, source): + """Return (line_index, column_offset, line_contents).""" + line_index = result['line'] - 1 + return (line_index, + result['column'] - 1, + source[line_index]) + def get_fixed_long_line(target, previous_line, original, indent_word=' ', max_line_length=79, aggressive=False, experimental=False, verbose=False): + """Break up long line and return result. + + Do this by generating multiple reformatted candidates and then + ranking the candidates to heuristically select the best option. + + """ indent = _get_indentation(target) source = target[len(indent):] assert source.lstrip() == source @@ -930,19 +1032,28 @@ def get_fixed_long_line(target, previous_line, original, # Also sort alphabetically as a tie breaker (for determinism). 
candidates = sorted( sorted(set(candidates).union([target, original])), - key=lambda x: line_shortening_rank(x, - indent_word, - max_line_length)) + key=lambda x: line_shortening_rank( + x, + indent_word, + max_line_length, + experimental=experimental)) if verbose >= 4: print(('-' * 79 + '\n').join([''] + candidates + ['']), - file=codecs.getwriter('utf-8')(sys.stderr.buffer - if hasattr(sys.stderr, - 'buffer') - else sys.stderr)) + file=wrap_output(sys.stderr, 'utf-8')) if candidates: - return candidates[0] + best_candidate = candidates[0] + # Don't allow things to get longer. + if longest_line_length(best_candidate) > longest_line_length(original): + return None + else: + return best_candidate + + +def longest_line_length(code): + """Return length of longest line.""" + return max(len(line) for line in code.splitlines()) def join_logical_line(logical_line): @@ -978,11 +1089,11 @@ def untokenize_without_newlines(tokens): last_row = end_row last_column = end_column - return text + return text.rstrip() def _find_logical(source_lines): - # make a variable which is the index of all the starts of lines + # Make a variable which is the index of all the starts of lines. logical_start = [] logical_end = [] last_newline = True @@ -1071,7 +1182,7 @@ def split_and_strip_non_empty_lines(text): return [line.strip() for line in text.splitlines() if line.strip()] -def fix_e269(source, aggressive=False): +def fix_e265(source, aggressive=False): # pylint: disable=unused-argument """Format block comments.""" if '#' not in source: # Optimization. @@ -1093,11 +1204,13 @@ def fix_e269(source, aggressive=False): # Normalize beginning if not a shebang. if len(line) > 1: + pos = next((index for index, c in enumerate(line) + if c != '#')) if ( # Leave multiple spaces like '# ' alone. - (line.count('#') > 1 or line[1].isalnum()) + (line[:pos].count('#') > 1 or line[1].isalnum()) and # Leave stylistic outlined blocks alone. 
- and not line.rstrip().endswith('#') + not line.rstrip().endswith('#') ): line = '# ' + line.lstrip('# \t') @@ -1108,7 +1221,7 @@ def fix_e269(source, aggressive=False): return ''.join(fixed_lines) -def refactor(source, fixer_names, ignore=None): +def refactor(source, fixer_names, ignore=None, filename=''): """Return refactored code using lib2to3. Skip if ignore string is produced in the refactored code. @@ -1117,7 +1230,8 @@ def refactor(source, fixer_names, ignore=None): from lib2to3 import pgen2 try: new_text = refactor_with_2to3(source, - fixer_names=fixer_names) + fixer_names=fixer_names, + filename=filename) except (pgen2.parse.ParseError, SyntaxError, UnicodeDecodeError, @@ -1139,7 +1253,8 @@ def code_to_2to3(select, ignore): return fixes -def fix_2to3(source, aggressive=True, select=None, ignore=None): +def fix_2to3(source, + aggressive=True, select=None, ignore=None, filename=''): """Fix various deprecated code (via lib2to3).""" if not aggressive: return source @@ -1149,7 +1264,8 @@ def fix_2to3(source, aggressive=True, select=None, ignore=None): return refactor(source, code_to_2to3(select=select, - ignore=ignore)) + ignore=ignore), + filename=filename) def fix_w602(source, aggressive=True): @@ -1217,7 +1333,7 @@ def get_diff_text(old, new, filename): text += line # Work around missing newline (http://bugs.python.org/issue2142). 
- if not line.endswith(newline): + if text and not line.endswith(newline): text += newline + r'\ No newline at end of file' + newline return text @@ -1291,7 +1407,6 @@ def shorten_line(tokens, source, indentation, indent_word, max_line_length, tokens=tokens, source=source, indentation=indentation, - indent_word=indent_word, max_line_length=max_line_length): yield shortened @@ -1429,16 +1544,25 @@ def __repr__(self): ########################################################################### # Public Methods - def add(self, obj, indent_amt): + def add(self, obj, indent_amt, break_after_open_bracket): if isinstance(obj, Atom): self._add_item(obj, indent_amt) return - self._add_container(obj, indent_amt) + self._add_container(obj, indent_amt, break_after_open_bracket) def add_comment(self, item): - self._lines.append(self._Space()) - self._lines.append(self._Space()) + num_spaces = 2 + if len(self._lines) > 1: + if isinstance(self._lines[-1], self._Space): + num_spaces -= 1 + if len(self._lines) > 2: + if isinstance(self._lines[-2], self._Space): + num_spaces -= 1 + + while num_spaces > 0: + self._lines.append(self._Space()) + num_spaces -= 1 self._lines.append(item) def add_indent(self, indent_amt): @@ -1460,8 +1584,8 @@ def add_space_if_needed(self, curr_text, equal=False): return prev_text = unicode(self._prev_item) - prev_prev_text = \ - unicode(self._prev_prev_item) if self._prev_prev_item else '' + prev_prev_text = ( + unicode(self._prev_prev_item) if self._prev_prev_item else '') if ( # The previous item was a keyword or identifier and the current @@ -1494,10 +1618,14 @@ def add_space_if_needed(self, curr_text, equal=False): (self._prev_prev_item.is_name or self._prev_prev_item.is_number or self._prev_prev_item.is_string)) and - prev_text in ('+', '-', '%', '*', '/', '//', '**'))))) + prev_text in ('+', '-', '%', '*', '/', '//', '**', 'in'))))) ): self._lines.append(self._Space()) + def previous_item(self): + """Return the previous non-whitespace item.""" + 
return self._prev_item + def fits_on_current_line(self, item_extent): return self.current_size() + item_extent <= self._max_line_length @@ -1569,24 +1697,41 @@ def _add_item(self, item, indent_amt): self._bracket_depth -= 1 assert self._bracket_depth >= 0 - def _add_container(self, container, indent_amt): + def _add_container(self, container, indent_amt, break_after_open_bracket): + actual_indent = indent_amt + 1 + if ( unicode(self._prev_item) != '=' and not self.line_empty() and not self.fits_on_current_line( - container.size + self._bracket_depth + 2) and - - # Don't split before the opening bracket of a call. - (unicode(container)[0] != '(' or not self._prev_item.is_name) + container.size + self._bracket_depth + 2) ): - # If the container doesn't fit on the current line and the current - # line isn't empty, place the container on the next line. - self._lines.append(self._LineBreak()) - self._lines.append(self._Indent(indent_amt)) + + if unicode(container)[0] == '(' and self._prev_item.is_name: + # Don't split before the opening bracket of a call. + break_after_open_bracket = True + actual_indent = indent_amt + 4 + elif ( + break_after_open_bracket or + unicode(self._prev_item) not in '([{' + ): + # If the container doesn't fit on the current line and the + # current line isn't empty, place the container on the next + # line. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + break_after_open_bracket = False + else: + actual_indent = self.current_size() + 1 + break_after_open_bracket = False + + if isinstance(container, (ListComprehension, IfExpression)): + actual_indent = indent_amt # Increase the continued indentation only if recursing on a # container. - container.reflow(self, ' ' * (indent_amt + 1)) + container.reflow(self, ' ' * actual_indent, + break_after_open_bracket=break_after_open_bracket) def _prevent_default_initializer_splitting(self, item, indent_amt): """Prevent splitting between a default initializer. 
@@ -1636,9 +1781,15 @@ def _split_after_delimiter(self, item, indent_amt): last_space = None for item in reversed(self._lines): + if ( + last_space and + (not isinstance(item, Atom) or not item.is_colon) + ): + break + else: + last_space = None if isinstance(item, self._Space): last_space = item - break if isinstance(item, (self._LineBreak, self._Indent)): return @@ -1693,8 +1844,12 @@ def __repr__(self): def __len__(self): return self.size - def reflow(self, reflowed_lines, continued_indent, extent, - break_after_open_bracket=False): + def reflow( + self, reflowed_lines, continued_indent, extent, + break_after_open_bracket=False, + is_list_comp_or_if_expr=False, + next_is_dot=False + ): if self._atom.token_type == tokenize.COMMENT: reflowed_lines.add_comment(self) return @@ -1705,9 +1860,16 @@ def reflow(self, reflowed_lines, continued_indent, extent, # Some atoms will need an extra 1-sized space token after them. total_size += 1 + prev_item = reflowed_lines.previous_item() if ( + not is_list_comp_or_if_expr and not reflowed_lines.fits_on_current_line(total_size) and - not reflowed_lines.line_empty() + not (next_is_dot and + reflowed_lines.fits_on_current_line(self.size + 1)) and + not reflowed_lines.line_empty() and + not self.is_colon and + not (prev_item and prev_item.is_name and + unicode(self) == '(') ): # Start a new line if there is already something on the line and # adding this atom would make it go over the max line length. 
@@ -1715,7 +1877,8 @@ def reflow(self, reflowed_lines, continued_indent, extent, else: reflowed_lines.add_space_if_needed(unicode(self)) - reflowed_lines.add(self, len(continued_indent)) + reflowed_lines.add(self, len(continued_indent), + break_after_open_bracket) def emit(self): return self.__repr__() @@ -1788,14 +1951,27 @@ def __getitem__(self, idx): def reflow(self, reflowed_lines, continued_indent, break_after_open_bracket=False): + last_was_container = False for (index, item) in enumerate(self._items): + next_item = get_item(self._items, index + 1) + if isinstance(item, Atom): + is_list_comp_or_if_expr = ( + isinstance(self, (ListComprehension, IfExpression))) item.reflow(reflowed_lines, continued_indent, - self._get_extent(index)) + self._get_extent(index), + is_list_comp_or_if_expr=is_list_comp_or_if_expr, + next_is_dot=(next_item and + unicode(next_item) == '.')) + if last_was_container and item.is_comma: + reflowed_lines.add_line_break(continued_indent) + last_was_container = False else: # isinstance(item, Container) - reflowed_lines.add(item, len(continued_indent)) + reflowed_lines.add(item, len(continued_indent), + break_after_open_bracket) + last_was_container = not isinstance(item, (ListComprehension, + IfExpression)) - next_item = get_item(self._items, index + 1) if ( break_after_open_bracket and index == 0 and # Prefer to keep empty containers together instead of @@ -1809,12 +1985,14 @@ def reflow(self, reflowed_lines, continued_indent, else: next_next_item = get_item(self._items, index + 2) if ( - unicode(item) not in '.%' and next_item and - next_next_item and unicode(next_item) != ':' and - not isinstance(next_next_item, Atom) and + unicode(item) not in ['.', '%', 'in'] and + next_item and not isinstance(next_item, Container) and + unicode(next_item) != ':' and + next_next_item and (not isinstance(next_next_item, Atom) or + unicode(next_item) == 'not') and not reflowed_lines.line_empty() and not reflowed_lines.fits_on_current_line( - 
next_item.size + next_next_item.size + 2) + self._get_extent(index + 1) + 2) ): reflowed_lines.add_line_break(continued_indent) @@ -1822,14 +2000,37 @@ def _get_extent(self, index): """The extent of the full element. E.g., the length of a function call or keyword. + """ extent = 0 + prev_item = get_item(self._items, index - 1) + seen_dot = prev_item and unicode(prev_item) == '.' while index < len(self._items): item = get_item(self._items, index) - if unicode(item) not in '.=' and not item.is_name: - break - extent += len(item) index += 1 + + if isinstance(item, (ListComprehension, IfExpression)): + break + + if isinstance(item, Container): + if prev_item and prev_item.is_name: + if seen_dot: + extent += 1 + else: + extent += item.size + + prev_item = item + continue + elif (unicode(item) not in ['.', '=', ':', 'not'] and + not item.is_name and not item.is_string): + break + + if unicode(item) == '.': + seen_dot = True + + extent += item.size + prev_item = item + return extent @property @@ -1908,6 +2109,15 @@ class ListComprehension(Container): """A high-level representation of a list comprehension.""" + @property + def size(self): + length = 0 + for item in self._items: + if isinstance(item, IfExpression): + break + length += item.size + return length + class IfExpression(Container): @@ -1970,6 +2180,8 @@ def _parse_container(tokens, index, for_or_if=None): index += 1 + return (None, None) + def _parse_tokens(tokens): """Parse the tokens. 
@@ -1993,6 +2205,8 @@ def _parse_tokens(tokens): if tok.token_string in '([{': (container, index) = _parse_container(tokens, index) + if not container: + return None parsed_tokens.append(container) else: parsed_tokens.append(Atom(tok)) @@ -2002,7 +2216,7 @@ def _parse_tokens(tokens): return parsed_tokens -def _reflow_lines(parsed_tokens, indentation, indent_word, max_line_length, +def _reflow_lines(parsed_tokens, indentation, max_line_length, start_on_prefix_line): """Reflow the lines so that it looks nice.""" @@ -2015,7 +2229,20 @@ def _reflow_lines(parsed_tokens, indentation, indent_word, max_line_length, break_after_open_bracket = not start_on_prefix_line lines = ReformattedLines(max_line_length) - lines.add_indent(len(indentation)) + lines.add_indent(len(indentation.lstrip('\r\n'))) + + if not start_on_prefix_line: + # If splitting after the opening bracket will cause the first element + # to be aligned weirdly, don't try it. + first_token = get_item(parsed_tokens, 0) + second_token = get_item(parsed_tokens, 1) + + if ( + first_token and second_token and + unicode(second_token)[0] == '(' and + len(indentation) + len(first_token) + 1 == len(continued_indent) + ): + return None for item in parsed_tokens: lines.add_space_if_needed(unicode(item), equal=True) @@ -2031,7 +2258,7 @@ def _reflow_lines(parsed_tokens, indentation, indent_word, max_line_length, return lines.emit() -def _shorten_line_at_tokens_new(tokens, source, indentation, indent_word, +def _shorten_line_at_tokens_new(tokens, source, indentation, max_line_length): """Shorten the line taking its length into account. @@ -2048,14 +2275,14 @@ def _shorten_line_at_tokens_new(tokens, source, indentation, indent_word, if parsed_tokens: # Perform two reflows. The first one starts on the same line as the # prefix. The second starts on the line after the prefix. 
- fixed = _reflow_lines(parsed_tokens, indentation, indent_word, - max_line_length, start_on_prefix_line=True) - if check_syntax(normalize_multiline(fixed.lstrip())): + fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line=True) + if fixed and check_syntax(normalize_multiline(fixed.lstrip())): yield fixed - fixed = _reflow_lines(parsed_tokens, indentation, indent_word, - max_line_length, start_on_prefix_line=False) - if check_syntax(normalize_multiline(fixed.lstrip())): + fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line=False) + if fixed and check_syntax(normalize_multiline(fixed.lstrip())): yield fixed @@ -2183,6 +2410,8 @@ def normalize_multiline(line): return line + 'def _(): pass' elif line.startswith('class '): return line + ' pass' + elif line.startswith(('if ', 'elif ', 'for ', 'while ')): + return line + ' pass' else: return line @@ -2208,9 +2437,12 @@ def __init__(self, options): super(QuietReport, self).__init__(options) self.__full_error_results = [] - def error(self, line_number, offset, text, _): + def error(self, line_number, offset, text, check): """Collect errors.""" - code = super(QuietReport, self).error(line_number, offset, text, _) + code = super(QuietReport, self).error(line_number, + offset, + text, + check) if code: self.__full_error_results.append( {'id': code, @@ -2283,8 +2515,6 @@ def run(self, indent_size=DEFAULT_INDENT_SIZE): return self.input_text # Remove trailing empty lines. lines = self.lines - while lines and lines[-1] == '\n': - lines.pop() # Sentinel. stats.append((len(lines), 0)) # Map count of leading spaces to # we want. 
@@ -2315,8 +2545,8 @@ def run(self, indent_size=DEFAULT_INDENT_SIZE): if have == _leading_space_count(lines[jline]): want = jlevel * indent_size break - if want < 0: # Maybe it's a hanging - # comment like this one, + if want < 0: # Maybe it's a hanging + # comment like this one, # in which case we should shift it like its base # line got shifted. for j in range(i - 1, -1, -1): @@ -2370,8 +2600,8 @@ def _reindent_stats(tokens): our headache! """ - find_stmt = 1 # next token begins a fresh stmt? - level = 0 # current indent level + find_stmt = 1 # Next token begins a fresh stmt? + level = 0 # Current indent level. stats = [] for t in tokens: @@ -2396,8 +2626,8 @@ def _reindent_stats(tokens): elif token_type == tokenize.COMMENT: if find_stmt: stats.append((sline, -1)) - # but we're still looking for a new stmt, so leave - # find_stmt alone + # But we're still looking for a new stmt, so leave + # find_stmt alone. elif token_type == tokenize.NL: pass @@ -2407,7 +2637,7 @@ def _reindent_stats(tokens): # must be the first token of the next program statement, or an # ENDMARKER. find_stmt = 0 - if line: # not endmarker + if line: # Not endmarker. stats.append((sline, level)) return stats @@ -2421,7 +2651,7 @@ def _leading_space_count(line): return i -def refactor_with_2to3(source_text, fixer_names): +def refactor_with_2to3(source_text, fixer_names, filename=''): """Use lib2to3 to refactor the source. Return the refactored source code. @@ -2433,7 +2663,8 @@ def refactor_with_2to3(source_text, fixer_names): from lib2to3.pgen2 import tokenize as lib2to3_tokenize try: - return unicode(tool.refactor_string(source_text, name='')) + # The name parameter is necessary particularly for the "import" fixer. 
+ return unicode(tool.refactor_string(source_text, name=filename)) except lib2to3_tokenize.TokenError: return source_text @@ -2446,7 +2677,7 @@ def check_syntax(code): return False -def filter_results(source, results, aggressive, indent_size): +def filter_results(source, results, aggressive): """Filter out spurious reports from pep8. If aggressive is True, we allow possibly unsafe fixes (E711, E712). @@ -2459,6 +2690,8 @@ def filter_results(source, results, aggressive, indent_size): commented_out_code_line_numbers = commented_out_code_lines(source) + has_e901 = any(result['id'].lower() == 'e901' for result in results) + for r in results: issue_id = r['id'].lower() @@ -2483,13 +2716,20 @@ def filter_results(source, results, aggressive, indent_size): continue if aggressive <= 1: - if issue_id.startswith(('e712', )): + if issue_id.startswith(('e712', 'e713')): continue if r['line'] in commented_out_code_line_numbers: if issue_id.startswith(('e26', 'e501')): continue + # Do not touch indentation if there is a token error caused by + # incomplete multi-line statement. Otherwise, we risk screwing up the + # indentation. + if has_e901: + if issue_id.startswith(('e1', 'e7')): + continue + yield r @@ -2582,7 +2822,6 @@ def shorten_comment(line, max_line_length, last_comment=False): # Trim comments that end with things like --------- return line[:max_line_length] + '\n' elif last_comment and re.match(r'\s*#+\s*\w+', line): - import textwrap split_lines = textwrap.wrap(line.lstrip(' \t#'), initial_indent=indentation, subsequent_indent=indentation, @@ -2624,18 +2863,45 @@ def code_match(code, select, ignore): return True -def fix_code(source, options=None): - """Return fixed source code.""" - if not options: - options = parse_args(['']) +def fix_code(source, options=None, encoding=None, apply_config=False): + """Return fixed source code. + + "encoding" will be used to decode "source" if it is a byte string. 
+ + """ + options = _get_options(options, apply_config) if not isinstance(source, unicode): - source = source.decode(locale.getpreferredencoding(False)) + source = source.decode(encoding or get_encoding()) sio = io.StringIO(source) return fix_lines(sio.readlines(), options=options) +def _get_options(raw_options, apply_config): + """Return parsed options.""" + if not raw_options: + return parse_args([''], apply_config=apply_config) + + if isinstance(raw_options, dict): + options = parse_args([''], apply_config=apply_config) + for name, value in raw_options.items(): + if not hasattr(options, name): + raise ValueError("No such option '{}'".format(name)) + + # Check for very basic type errors. + expected_type = type(getattr(options, name)) + if not isinstance(expected_type, (str, unicode)): + if isinstance(value, (str, unicode)): + raise ValueError( + "Option '{}' should not be a string".format(name)) + setattr(options, name, value) + else: + options = raw_options + + return options + + def fix_lines(source_lines, options, filename=''): """Return fixed source code.""" # Transform everything to line feed. Then change them back to original @@ -2647,10 +2913,13 @@ def fix_lines(source_lines, options, filename=''): previous_hashes = set() if options.line_range: + # Disable "apply_local_fixes()" for now due to issue #175. fixed_source = tmp_source else: # Apply global fixes only once (for efficiency). 
- fixed_source = apply_global_fixes(tmp_source, options) + fixed_source = apply_global_fixes(tmp_source, + options, + filename=filename) passes = 0 long_line_ignore_cache = set() @@ -2675,9 +2944,9 @@ def fix_lines(source_lines, options, filename=''): return ''.join(normalize_line_endings(sio.readlines(), original_newline)) -def fix_file(filename, options=None, output=None): +def fix_file(filename, options=None, output=None, apply_config=False): if not options: - options = parse_args([filename]) + options = parse_args([filename], apply_config=apply_config) original_source = readlines_from_file(filename) @@ -2687,11 +2956,7 @@ def fix_file(filename, options=None, output=None): encoding = detect_encoding(filename) if output: - output = codecs.getwriter(encoding)(output.buffer - if hasattr(output, 'buffer') - else output) - - output = LineEndingWrapper(output) + output = LineEndingWrapper(wrap_output(output, encoding=encoding)) fixed_source = fix_lines(fixed_source, options, filename=filename) @@ -2719,7 +2984,7 @@ def fix_file(filename, options=None, output=None): def global_fixes(): """Yield multiple (code, function) tuples.""" - for function in globals().values(): + for function in list(globals().values()): if inspect.isfunction(function): arguments = inspect.getargspec(function)[0] if arguments[:1] != ['source']: @@ -2730,21 +2995,23 @@ def global_fixes(): yield (code, function) -def apply_global_fixes(source, options): +def apply_global_fixes(source, options, where='global', filename=''): """Run global fixes on source code. These are fixes that only need be done once (unlike those in FixPEP8, which are dependent on pep8). 
""" - if code_match('E101', select=options.select, ignore=options.ignore): + if any(code_match(code, select=options.select, ignore=options.ignore) + for code in ['E101', 'E111']): source = reindent(source, indent_size=options.indent_size) for (code, function) in global_fixes(): if code_match(code, select=options.select, ignore=options.ignore): if options.verbose: - print('---> Applying global fix for {0}'.format(code.upper()), + print('---> Applying {0} fix for {1}'.format(where, + code.upper()), file=sys.stderr) source = function(source, aggressive=options.aggressive) @@ -2752,7 +3019,8 @@ def apply_global_fixes(source, options): source = fix_2to3(source, aggressive=options.aggressive, select=options.select, - ignore=options.ignore) + ignore=options.ignore, + filename=filename) return source @@ -2784,48 +3052,57 @@ def create_parser(): prog='autopep8') parser.add_argument('--version', action='version', version='%(prog)s ' + __version__) - parser.add_argument('-v', '--verbose', action='count', dest='verbose', + parser.add_argument('-v', '--verbose', action='count', default=0, help='print verbose messages; ' - 'multiple -v result in more verbose messages') - parser.add_argument('-d', '--diff', action='store_true', dest='diff', + 'multiple -v result in more verbose messages') + parser.add_argument('-d', '--diff', action='store_true', help='print the diff for the fixed source') parser.add_argument('-i', '--in-place', action='store_true', help='make changes to files in place') + parser.add_argument('--global-config', metavar='filename', + default=DEFAULT_CONFIG, + help='path to a global pep8 config file; if this file ' + 'does not exist then this is ignored ' + '(default: {0})'.format(DEFAULT_CONFIG)) + parser.add_argument('--ignore-local-config', action='store_true', + help="don't look for and apply local config files; " + 'if not passed, defaults are updated with any ' + "config files in the project's root directory") parser.add_argument('-r', '--recursive', 
action='store_true', help='run recursively over directories; ' - 'must be used with --in-place or --diff') + 'must be used with --in-place or --diff') parser.add_argument('-j', '--jobs', type=int, metavar='n', default=1, help='number of parallel jobs; ' - 'match CPU count if value is less than 1') + 'match CPU count if value is less than 1') parser.add_argument('-p', '--pep8-passes', metavar='n', default=-1, type=int, help='maximum number of additional pep8 passes ' - '(default: infinite)') + '(default: infinite)') parser.add_argument('-a', '--aggressive', action='count', default=0, help='enable non-whitespace changes; ' - 'multiple -a result in more aggressive changes') + 'multiple -a result in more aggressive changes') parser.add_argument('--experimental', action='store_true', help='enable experimental fixes') parser.add_argument('--exclude', metavar='globs', help='exclude file/directory names that match these ' - 'comma-separated globs') + 'comma-separated globs') parser.add_argument('--list-fixes', action='store_true', help='list codes for fixes; ' 'used by --ignore and --select') parser.add_argument('--ignore', metavar='errors', default='', help='do not fix these errors/warnings ' - '(default: {0})'.format(DEFAULT_IGNORE)) + '(default: {0})'.format(DEFAULT_IGNORE)) parser.add_argument('--select', metavar='errors', default='', help='fix only these errors/warnings (e.g. E4,W)') parser.add_argument('--max-line-length', metavar='n', default=79, type=int, help='set maximum allowed line length ' - '(default: %(default)s)') - parser.add_argument('--range', metavar='line', dest='line_range', + '(default: %(default)s)') + parser.add_argument('--line-range', '--range', metavar='line', default=None, type=int, nargs=2, help='only fix errors found within this inclusive ' - 'range of line numbers (e.g. 1 99); ' - 'line numbers are indexed at 1') + 'range of line numbers (e.g. 
1 99); ' + 'line numbers are indexed at 1') parser.add_argument('--indent-size', default=DEFAULT_INDENT_SIZE, type=int, metavar='n', help='number of spaces per indent level ' @@ -2836,7 +3113,7 @@ def create_parser(): return parser -def parse_args(arguments): +def parse_args(arguments, apply_config=False): """Parse command-line options.""" parser = create_parser() args = parser.parse_args(arguments) @@ -2846,6 +3123,11 @@ def parse_args(arguments): args.files = [decode_filename(name) for name in args.files] + if apply_config: + parser = read_config(args, parser) + args = parser.parse_args(arguments) + args.files = [decode_filename(name) for name in args.files] + if '-' in args.files: if len(args.files) > 1: parser.error('cannot mix stdin and regular files') @@ -2867,9 +3149,6 @@ def parse_args(arguments): if args.recursive and not (args.in_place or args.diff): parser.error('--recursive must be used with --in-place or --diff') - if args.exclude and not args.recursive: - parser.error('--exclude is only relevant when used with --recursive') - if args.in_place and args.diff: parser.error('--in-place and --diff are mutually exclusive') @@ -2877,19 +3156,19 @@ def parse_args(arguments): parser.error('--max-line-length must be greater than 0') if args.select: - args.select = args.select.split(',') + args.select = _split_comma_separated(args.select) if args.ignore: - args.ignore = args.ignore.split(',') + args.ignore = _split_comma_separated(args.ignore) elif not args.select: if args.aggressive: # Enable everything by default if aggressive. 
args.select = ['E', 'W'] else: - args.ignore = DEFAULT_IGNORE.split(',') + args.ignore = _split_comma_separated(DEFAULT_IGNORE) if args.exclude: - args.exclude = args.exclude.split(',') + args.exclude = _split_comma_separated(args.exclude) else: args.exclude = [] @@ -2902,9 +3181,54 @@ def parse_args(arguments): if args.jobs > 1 and not args.in_place: parser.error('parallel jobs requires --in-place') + if args.line_range: + if args.line_range[0] <= 0: + parser.error('--range must be positive numbers') + if args.line_range[0] > args.line_range[1]: + parser.error('First value of --range should be less than or equal ' + 'to the second') + return args +def read_config(args, parser): + """Read both user configuration and local configuration.""" + try: + from configparser import ConfigParser as SafeConfigParser + from configparser import Error + except ImportError: + from ConfigParser import SafeConfigParser + from ConfigParser import Error + + config = SafeConfigParser() + + try: + config.read(args.global_config) + + if not args.ignore_local_config: + parent = tail = args.files and os.path.abspath( + os.path.commonprefix(args.files)) + while tail: + if config.read([os.path.join(parent, fn) + for fn in PROJECT_CONFIG]): + break + (parent, tail) = os.path.split(parent) + + defaults = dict((k.lstrip('-').replace('-', '_'), v) + for k, v in config.items('pep8')) + parser.set_defaults(**defaults) + except Error: + # Ignore for now. + pass + + return parser + + +def _split_comma_separated(string): + """Return a set of strings.""" + return set(text.strip() for text in string.split(',') if text.strip()) + + def decode_filename(filename): """Return Unicode filename.""" if isinstance(filename, unicode): @@ -2946,7 +3270,8 @@ def docstring_summary(docstring): return docstring.split('\n')[0] -def line_shortening_rank(candidate, indent_word, max_line_length): +def line_shortening_rank(candidate, indent_word, max_line_length, + experimental=False): """Return rank of candidate. 
This is for sorting candidates. @@ -2956,19 +3281,25 @@ def line_shortening_rank(candidate, indent_word, max_line_length): return 0 rank = 0 - lines = candidate.split('\n') + lines = candidate.rstrip().split('\n') offset = 0 if ( not lines[0].lstrip().startswith('#') and lines[0].rstrip()[-1] not in '([{' ): - for symbol in '([{': - offset = max(offset, 1 + lines[0].find(symbol)) + for (opening, closing) in ('()', '[]', '{}'): + # Don't penalize empty containers that aren't split up. Things like + # this "foo(\n )" aren't particularly good. + opening_loc = lines[0].find(opening) + closing_loc = lines[0].find(closing) + if opening_loc >= 0: + if closing_loc < 0 or closing_loc != opening_loc + 1: + offset = max(offset, 1 + opening_loc) current_longest = max(offset + len(x.strip()) for x in lines) - rank += 2 * max(0, current_longest - max_line_length) + rank += 4 * max(0, current_longest - max_line_length) rank += len(lines) @@ -3001,18 +3332,38 @@ def line_shortening_rank(candidate, indent_word, max_line_length): if current_line == bad_start: rank += 1000 - if current_line.endswith(('(', '[', '{')): + if ( + current_line.endswith(('.', '%', '+', '-', '/')) and + "': " in current_line + ): + rank += 1000 + + if current_line.endswith(('(', '[', '{', '.')): # Avoid lonely opening. They result in longer lines. if len(current_line) <= len(indent_word): rank += 100 - # Avoid ugliness of ", (\n". - if current_line.endswith(','): + # Avoid the ugliness of ", (\n". + if ( + current_line.endswith('(') and + current_line[:-1].rstrip().endswith(',') + ): + rank += 100 + + # Also avoid the ugliness of "foo.\nbar" + if current_line.endswith('.'): rank += 100 if has_arithmetic_operator(current_line): rank += 100 + # Avoid breaking at unary operators. 
+ if re.match(r'.*[(\[{]\s*[\-\+~]$', current_line.rstrip('\\ ')): + rank += 1000 + + if re.match(r'.*lambda\s*\*$', current_line.rstrip('\\ ')): + rank += 1000 + if current_line.endswith(('%', '(', '[', '{')): rank -= 20 @@ -3044,12 +3395,16 @@ def line_shortening_rank(candidate, indent_word, max_line_length): if total_len < max_line_length: rank += 10 else: - rank += 1 + rank += 100 if experimental else 1 # Prefer breaking at commas rather than colon. if ',' in current_line and current_line.endswith(':'): rank += 10 + # Avoid splitting dictionaries between key and value. + if current_line.endswith(':'): + rank += 100 + rank += 10 * count_unbalanced_brackets(current_line) return max(0, rank) @@ -3132,6 +3487,8 @@ def match_file(filename, exclude): for pattern in exclude: if fnmatch.fnmatch(base_name, pattern): return False + if fnmatch.fnmatch(filename, pattern): + return False if not os.path.isdir(filename) and not is_python_file(filename): return False @@ -3213,8 +3570,23 @@ def is_probably_part_of_multiline(line): ) -def main(): - """Tool main.""" +def wrap_output(output, encoding): + """Return output with specified encoding.""" + return codecs.getwriter(encoding)(output.buffer + if hasattr(output, 'buffer') + else output) + + +def get_encoding(): + """Return preferred encoding.""" + return locale.getpreferredencoding() or sys.getdefaultencoding() + + +def main(argv=None, apply_config=True): + """Command-line entry.""" + if argv is None: + argv = sys.argv + try: # Exit on broken pipe. 
signal.signal(signal.SIGPIPE, signal.SIG_DFL) @@ -3223,7 +3595,7 @@ def main(): pass try: - args = parse_args(sys.argv[1:]) + args = parse_args(argv[1:], apply_config=apply_config) if args.list_fixes: for code, description in sorted(supported_fixes()): @@ -3234,9 +3606,12 @@ def main(): if args.files == ['-']: assert not args.in_place + encoding = sys.stdin.encoding or get_encoding() + # LineEndingWrapper is unnecessary here due to the symmetry between # standard in and standard out. - sys.stdout.write(fix_code(sys.stdin.read(), args)) + wrap_output(sys.stdout, encoding=encoding).write( + fix_code(sys.stdin.read(), args, encoding=encoding)) else: if args.in_place or args.diff: args.files = list(set(args.files)) diff --git a/pymode/environment.py b/pymode/environment.py index 43246cea..c146ea6e 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -1,40 +1,40 @@ -""" Define interfaces. """ +"""Define interfaces.""" from __future__ import print_function -import vim import json -import time import os.path +import time +import vim # noqa from ._compat import PY2 class VimPymodeEnviroment(object): - """ Vim User interface. """ + """Vim User interface.""" prefix = '[Pymode]' def __init__(self): - """ Init VIM environment. """ + """Init VIM environment.""" self.current = vim.current self.options = dict(encoding=vim.eval('&enc')) self.options['debug'] = self.var('g:pymode_debug', True) @property def curdir(self): - """ Return current working directory. """ + """Return current working directory.""" return self.var('getcwd()') @property def curbuf(self): - """ Return current buffer. """ + """Return current buffer.""" return self.current.buffer @property def cursor(self): - """ Return current window position. + """Return current window position. :return tuple: (row, col) @@ -43,12 +43,12 @@ def cursor(self): @property def source(self): - """ Return source of current buffer. 
""" + """Return source of current buffer.""" return "\n".join(self.lines) @property def lines(self): - """ Iterate by lines in current file. + """Iterate by lines in current file. :return list: @@ -60,7 +60,7 @@ def lines(self): @staticmethod def var(name, to_bool=False, silence=False): - """ Get vim variable. + """Get vim variable. :return vimobj: @@ -81,7 +81,7 @@ def var(name, to_bool=False, silence=False): @staticmethod def message(msg, history=False): - """ Show message to user. + """Show message to user. :return: :None @@ -92,7 +92,7 @@ def message(msg, history=False): return vim.command('call pymode#wide_message("%s")' % str(msg)) def user_input(self, msg, default=''): - """ Return user input or default. + """Return user input or default. :return str: @@ -112,7 +112,7 @@ def user_input(self, msg, default=''): return input_str or default def user_confirm(self, msg, yes=False): - """ Get user confirmation. + """Get user confirmation. :return bool: @@ -122,7 +122,7 @@ def user_confirm(self, msg, yes=False): return action and 'yes'.startswith(action) def user_input_choices(self, msg, *options): - """ Get one of many options. + """Get one of many options. :return str: A choosen option @@ -148,24 +148,24 @@ def user_input_choices(self, msg, *options): @staticmethod def error(msg): - """ Show error to user. """ + """Show error to user.""" vim.command('call pymode#error("%s")' % str(msg)) def debug(self, msg, *args): - """ Print debug information. """ + """Print debug information.""" if self.options.get('debug'): print("%s %s [%s]" % ( int(time.time()), msg, ', '.join([str(a) for a in args]))) def stop(self, value=None): - """ Break Vim function. """ + """Break Vim function.""" cmd = 'return' if value is not None: cmd += ' ' + self.prepare_value(value) vim.command(cmd) def catch_exceptions(self, func): - """ Decorator. Make execution more silence. + """Decorator. Make execution more silence. 
:return func: @@ -181,19 +181,19 @@ def _wrapper(*args, **kwargs): return _wrapper def run(self, name, *args): - """ Run vim function. """ + """Run vim function.""" vim.command('call %s(%s)' % (name, ", ".join([ self.prepare_value(a) for a in args ]))) def let(self, name, value): - """ Set variable. """ + """Set variable.""" cmd = 'let %s = %s' % (name, self.prepare_value(value)) self.debug(cmd) vim.command(cmd) def prepare_value(self, value, dumps=True): - """ Decode bstr to vim encoding. + """Decode bstr to vim encoding. :return unicode string: @@ -207,7 +207,7 @@ def prepare_value(self, value, dumps=True): return value def get_offset_params(self, cursor=None, base=""): - """ Calculate current offset. + """Calculate current offset. :return tuple: (source, offset) @@ -228,11 +228,11 @@ def get_offset_params(self, cursor=None, base=""): @staticmethod def goto_line(line): - """ Go to line. """ + """Go to line.""" vim.command('normal %sggzz' % line) def goto_file(self, path, cmd='e', force=False): - """ Function description. """ + """Open file by path.""" if force or os.path.abspath(path) != self.curbuf.name: self.debug('read', path) if ' ' in path and os.name == 'posix': @@ -241,7 +241,7 @@ def goto_file(self, path, cmd='e', force=False): @staticmethod def goto_buffer(bufnr): - """ Open buffer. """ + """Open buffer.""" if str(bufnr) != '-1': vim.command('buffer %s' % bufnr) diff --git a/pymode/libs/_markerlib/__init__.py b/pymode/libs/_markerlib/__init__.py new file mode 100644 index 00000000..e2b237b1 --- /dev/null +++ b/pymode/libs/_markerlib/__init__.py @@ -0,0 +1,16 @@ +try: + import ast + from _markerlib.markers import default_environment, compile, interpret +except ImportError: + if 'ast' in globals(): + raise + def default_environment(): + return {} + def compile(marker): + def marker_fn(environment=None, override=None): + # 'empty markers are True' heuristic won't install extra deps. 
+ return not marker.strip() + marker_fn.__doc__ = marker + return marker_fn + def interpret(marker, environment=None, override=None): + return compile(marker)() diff --git a/pymode/libs/_markerlib/markers.py b/pymode/libs/_markerlib/markers.py new file mode 100644 index 00000000..fa837061 --- /dev/null +++ b/pymode/libs/_markerlib/markers.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +"""Interpret PEP 345 environment markers. + +EXPR [in|==|!=|not in] EXPR [or|and] ... + +where EXPR belongs to any of those: + + python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1]) + python_full_version = sys.version.split()[0] + os.name = os.name + sys.platform = sys.platform + platform.version = platform.version() + platform.machine = platform.machine() + platform.python_implementation = platform.python_implementation() + a free string, like '2.6', or 'win32' +""" + +__all__ = ['default_environment', 'compile', 'interpret'] + +import ast +import os +import platform +import sys +import weakref + +_builtin_compile = compile + +try: + from platform import python_implementation +except ImportError: + if os.name == "java": + # Jython 2.5 has ast module, but not platform.python_implementation() function. + def python_implementation(): + return "Jython" + else: + raise + + +# restricted set of variables +_VARS = {'sys.platform': sys.platform, + 'python_version': '%s.%s' % sys.version_info[:2], + # FIXME parsing sys.platform is not reliable, but there is no other + # way to get e.g. 2.7.2+, and the PEP is defined with sys.version + 'python_full_version': sys.version.split(' ', 1)[0], + 'os.name': os.name, + 'platform.version': platform.version(), + 'platform.machine': platform.machine(), + 'platform.python_implementation': python_implementation(), + 'extra': None # wheel extension + } + +for var in list(_VARS.keys()): + if '.' 
in var: + _VARS[var.replace('.', '_')] = _VARS[var] + +def default_environment(): + """Return copy of default PEP 385 globals dictionary.""" + return dict(_VARS) + +class ASTWhitelist(ast.NodeTransformer): + def __init__(self, statement): + self.statement = statement # for error messages + + ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str) + # Bool operations + ALLOWED += (ast.And, ast.Or) + # Comparison operations + ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn) + + def visit(self, node): + """Ensure statement only contains allowed nodes.""" + if not isinstance(node, self.ALLOWED): + raise SyntaxError('Not allowed in environment markers.\n%s\n%s' % + (self.statement, + (' ' * node.col_offset) + '^')) + return ast.NodeTransformer.visit(self, node) + + def visit_Attribute(self, node): + """Flatten one level of attribute access.""" + new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx) + return ast.copy_location(new_node, node) + +def parse_marker(marker): + tree = ast.parse(marker, mode='eval') + new_tree = ASTWhitelist(marker).generic_visit(tree) + return new_tree + +def compile_marker(parsed_marker): + return _builtin_compile(parsed_marker, '', 'eval', + dont_inherit=True) + +_cache = weakref.WeakValueDictionary() + +def compile(marker): + """Return compiled marker as a function accepting an environment dict.""" + try: + return _cache[marker] + except KeyError: + pass + if not marker.strip(): + def marker_fn(environment=None, override=None): + """""" + return True + else: + compiled_marker = compile_marker(parse_marker(marker)) + def marker_fn(environment=None, override=None): + """override updates environment""" + if override is None: + override = {} + if environment is None: + environment = default_environment() + environment.update(override) + return eval(compiled_marker, environment) + marker_fn.__doc__ = marker + _cache[marker] = marker_fn + return 
_cache[marker] + +def interpret(marker, environment=None): + return compile(marker)(environment) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py b/pymode/libs/astroid/__init__.py similarity index 89% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py rename to pymode/libs/astroid/__init__.py index 19c80902..d4fd12c5 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/__init__.py +++ b/pymode/libs/astroid/__init__.py @@ -79,6 +79,9 @@ class AsStringRegexpPredicate(object): If specified, the second argument is an `attrgetter` expression that will be applied on the node first to get the actual node on which `as_string` should be called. + + WARNING: This can be fairly slow, as it has to convert every AST node back + to Python code; you should consider examining the AST directly instead. """ def __init__(self, regexp, expression=None): self.regexp = re.compile(regexp) @@ -98,13 +101,23 @@ def inference_tip(infer_function): .. sourcecode:: python MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple), - AsStringRegexpPredicate('namedtuple', 'func')) + predicate) """ def transform(node, infer_function=infer_function): node._explicit_inference = infer_function return node return transform + +def register_module_extender(manager, module_name, get_extension_mod): + def transform(node): + extension_module = get_extension_mod() + for name, obj in extension_module.locals.items(): + node.locals[name] = obj + + manager.register_transform(Module, transform, lambda n: n.name == module_name) + + # load brain plugins from os import listdir from os.path import join, dirname diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py b/pymode/libs/astroid/__pkginfo__.py similarity index 78% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py rename to pymode/libs/astroid/__pkginfo__.py index 85398ff1..3fb45aa4 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/__pkginfo__.py +++ 
b/pymode/libs/astroid/__pkginfo__.py @@ -16,33 +16,27 @@ # You should have received a copy of the GNU Lesser General Public License along # with astroid. If not, see . """astroid packaging information""" - distname = 'astroid' modname = 'astroid' -numversion = (1, 1, 1) +numversion = (1, 3, 8) version = '.'.join([str(num) for num in numversion]) -install_requires = ['logilab-common >= 0.60.0'] +install_requires = ['logilab-common>=0.63.0', 'six'] license = 'LGPL' author = 'Logilab' -author_email = 'python-projects@lists.logilab.org' +author_email = 'pylint-dev@lists.logilab.org' mailinglist = "mailto://%s" % author_email web = 'http://bitbucket.org/logilab/astroid' -description = "rebuild a new abstract syntax tree from Python's ast" - -from os.path import join -include_dirs = ['brain', - join('test', 'regrtest_data'), - join('test', 'data'), join('test', 'data2')] +description = "A abstract syntax tree for Python with inference support." classifiers = ["Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Quality Assurance", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", - ] + ] diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py b/pymode/libs/astroid/as_string.py similarity index 85% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py rename to pymode/libs/astroid/as_string.py index ace1c4e3..f627f9e8 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/as_string.py +++ b/pymode/libs/astroid/as_string.py @@ -44,29 +44,29 @@ def _repr_tree(node, result, indent='', _done=None, ids=False): if not hasattr(node, '_astroid_fields'): # not a astroid node return if node in _done: - result.append( indent + 'loop in tree: %s' % node ) + result.append(indent + 'loop in tree: %s' % node) return _done.add(node) node_str = str(node) if ids: node_str += ' . 
\t%x' % id(node) - result.append( indent + node_str ) + result.append(indent + node_str) indent += INDENT for field in node._astroid_fields: value = getattr(node, field) - if isinstance(value, (list, tuple) ): - result.append( indent + field + " = [" ) + if isinstance(value, (list, tuple)): + result.append(indent + field + " = [") for child in value: - if isinstance(child, (list, tuple) ): + if isinstance(child, (list, tuple)): # special case for Dict # FIXME _repr_tree(child[0], result, indent, _done, ids) _repr_tree(child[1], result, indent, _done, ids) result.append(indent + ',') else: _repr_tree(child, result, indent, _done, ids) - result.append( indent + "]" ) + result.append(indent + "]") else: - result.append( indent + field + " = " ) + result.append(indent + field + " = ") _repr_tree(value, result, indent, _done, ids) @@ -97,7 +97,7 @@ def visit_assert(self, node): """return an astroid.Assert node as string""" if node.fail: return 'assert %s, %s' % (node.test.accept(self), - node.fail.accept(self)) + node.fail.accept(self)) return 'assert %s' % node.test.accept(self) def visit_assname(self, node): @@ -124,7 +124,7 @@ def visit_binop(self, node): def visit_boolop(self, node): """return an astroid.BoolOp node as string""" return (' %s ' % node.op).join(['(%s)' % n.accept(self) - for n in node.values]) + for n in node.values]) def visit_break(self, node): """return an astroid.Break node as string""" @@ -135,20 +135,20 @@ def visit_callfunc(self, node): expr_str = node.func.accept(self) args = [arg.accept(self) for arg in node.args] if node.starargs: - args.append( '*' + node.starargs.accept(self)) + args.append('*' + node.starargs.accept(self)) if node.kwargs: - args.append( '**' + node.kwargs.accept(self)) + args.append('**' + node.kwargs.accept(self)) return '%s(%s)' % (expr_str, ', '.join(args)) def visit_class(self, node): """return an astroid.Class node as string""" decorate = node.decorators and node.decorators.accept(self) or '' - bases = ', 
'.join([n.accept(self) for n in node.bases]) + bases = ', '.join([n.accept(self) for n in node.bases]) if sys.version_info[0] == 2: bases = bases and '(%s)' % bases or '' else: metaclass = node.metaclass() - if metaclass: + if metaclass and not node.has_metaclass_hack(): if bases: bases = '(%s, metaclass=%s)' % (bases, metaclass.name) else: @@ -157,7 +157,7 @@ def visit_class(self, node): bases = bases and '(%s)' % bases or '' docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs, - self._stmt_list( node.body)) + self._stmt_list(node.body)) def visit_compare(self, node): """return an astroid.Compare node as string""" @@ -167,9 +167,9 @@ def visit_compare(self, node): def visit_comprehension(self, node): """return an astroid.Comprehension node as string""" - ifs = ''.join([ ' if %s' % n.accept(self) for n in node.ifs]) + ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs]) return 'for %s in %s%s' % (node.target.accept(self), - node.iter.accept(self), ifs ) + node.iter.accept(self), ifs) def visit_const(self, node): """return an astroid.Const node as string""" @@ -182,7 +182,7 @@ def visit_continue(self, node): def visit_delete(self, node): # XXX check if correct """return an astroid.Delete node as string""" return 'del %s' % ', '.join([child.accept(self) - for child in node.targets]) + for child in node.targets]) def visit_delattr(self, node): """return an astroid.DelAttr node as string""" @@ -199,12 +199,13 @@ def visit_decorators(self, node): def visit_dict(self, node): """return an astroid.Dict node as string""" return '{%s}' % ', '.join(['%s: %s' % (key.accept(self), - value.accept(self)) for key, value in node.items]) + value.accept(self)) + for key, value in node.items]) def visit_dictcomp(self, node): """return an astroid.DictComp node as string""" return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self), - ' '.join([n.accept(self) for n in node.generators])) 
+ ' '.join([n.accept(self) for n in node.generators])) def visit_discard(self, node): """return an astroid.Discard node as string""" @@ -218,7 +219,7 @@ def visit_excepthandler(self, node): if node.type: if node.name: excs = 'except %s, %s' % (node.type.accept(self), - node.name.accept(self)) + node.name.accept(self)) else: excs = 'except %s' % node.type.accept(self) else: @@ -246,13 +247,13 @@ def visit_exec(self, node): def visit_extslice(self, node): """return an astroid.ExtSlice node as string""" - return ','.join( [dim.accept(self) for dim in node.dims] ) + return ','.join([dim.accept(self) for dim in node.dims]) def visit_for(self, node): """return an astroid.For node as string""" fors = 'for %s in %s:\n%s' % (node.target.accept(self), - node.iter.accept(self), - self._stmt_list( node.body)) + node.iter.accept(self), + self._stmt_list(node.body)) if node.orelse: fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse)) return fors @@ -267,12 +268,12 @@ def visit_function(self, node): decorate = node.decorators and node.decorators.accept(self) or '' docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self), - docs, self._stmt_list(node.body)) + docs, self._stmt_list(node.body)) def visit_genexpr(self, node): """return an astroid.GenExpr node as string""" - return '(%s %s)' % (node.elt.accept(self), ' '.join([n.accept(self) - for n in node.generators])) + return '(%s %s)' % (node.elt.accept(self), + ' '.join([n.accept(self) for n in node.generators])) def visit_getattr(self, node): """return an astroid.Getattr node as string""" @@ -292,7 +293,8 @@ def visit_if(self, node): def visit_ifexp(self, node): """return an astroid.IfExp node as string""" return '%s if %s else %s' % (node.body.accept(self), - node.test.accept(self), node.orelse.accept(self)) + node.test.accept(self), + node.orelse.accept(self)) def visit_import(self, node): """return an astroid.Import node as string""" 
@@ -304,7 +306,8 @@ def visit_keyword(self, node): def visit_lambda(self, node): """return an astroid.Lambda node as string""" - return 'lambda %s: %s' % (node.args.accept(self), node.body.accept(self)) + return 'lambda %s: %s' % (node.args.accept(self), + node.body.accept(self)) def visit_list(self, node): """return an astroid.List node as string""" @@ -312,8 +315,8 @@ def visit_list(self, node): def visit_listcomp(self, node): """return an astroid.ListComp node as string""" - return '[%s %s]' % (node.elt.accept(self), ' '.join([n.accept(self) - for n in node.generators])) + return '[%s %s]' % (node.elt.accept(self), + ' '.join([n.accept(self) for n in node.generators])) def visit_module(self, node): """return an astroid.Module node as string""" @@ -343,10 +346,10 @@ def visit_raise(self, node): if node.inst: if node.tback: return 'raise %s, %s, %s' % (node.exc.accept(self), - node.inst.accept(self), - node.tback.accept(self)) + node.inst.accept(self), + node.tback.accept(self)) return 'raise %s, %s' % (node.exc.accept(self), - node.inst.accept(self)) + node.inst.accept(self)) return 'raise %s' % node.exc.accept(self) return 'raise' @@ -367,8 +370,8 @@ def visit_set(self, node): def visit_setcomp(self, node): """return an astroid.SetComp node as string""" - return '{%s %s}' % (node.elt.accept(self), ' '.join([n.accept(self) - for n in node.generators])) + return '{%s %s}' % (node.elt.accept(self), + ' '.join([n.accept(self) for n in node.generators])) def visit_slice(self, node): """return a astroid.Slice node as string""" @@ -385,7 +388,7 @@ def visit_subscript(self, node): def visit_tryexcept(self, node): """return an astroid.TryExcept node as string""" - trys = ['try:\n%s' % self._stmt_list( node.body)] + trys = ['try:\n%s' % self._stmt_list(node.body)] for handler in node.handlers: trys.append(handler.accept(self)) if node.orelse: @@ -394,13 +397,13 @@ def visit_tryexcept(self, node): def visit_tryfinally(self, node): """return an astroid.TryFinally node as 
string""" - return 'try:\n%s\nfinally:\n%s' % (self._stmt_list( node.body), - self._stmt_list(node.finalbody)) + return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body), + self._stmt_list(node.finalbody)) def visit_tuple(self, node): """return an astroid.Tuple node as string""" if len(node.elts) == 1: - return '(%s, )' % node.elts[0].accept(self) + return '(%s, )' % node.elts[0].accept(self) return '(%s)' % ', '.join([child.accept(self) for child in node.elts]) def visit_unaryop(self, node): @@ -424,7 +427,7 @@ def visit_with(self, node): # 'with' without 'as' is possible items = ', '.join(('(%s)' % expr.accept(self)) + (vars and ' as (%s)' % (vars.accept(self)) or '') for expr, vars in node.items) - return 'with %s:\n%s' % (items, self._stmt_list( node.body)) + return 'with %s:\n%s' % (items, self._stmt_list(node.body)) def visit_yield(self, node): """yield an ast.Yield node as string""" @@ -443,7 +446,7 @@ def visit_excepthandler(self, node): if node.type: if node.name: excs = 'except %s as %s' % (node.type.accept(self), - node.name.accept(self)) + node.name.accept(self)) else: excs = 'except %s' % node.type.accept(self) else: diff --git a/pymode/libs/astroid/astpeephole.py b/pymode/libs/astroid/astpeephole.py new file mode 100644 index 00000000..af03462a --- /dev/null +++ b/pymode/libs/astroid/astpeephole.py @@ -0,0 +1,86 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""Small AST optimizations.""" + +import _ast + +from astroid import nodes + + +__all__ = ('ASTPeepholeOptimizer', ) + + +try: + _TYPES = (_ast.Str, _ast.Bytes) +except AttributeError: + _TYPES = (_ast.Str, ) + + +class ASTPeepholeOptimizer(object): + """Class for applying small optimizations to generate new AST.""" + + def optimize_binop(self, node): + """Optimize BinOps with string Const nodes on the lhs. + + This fixes an infinite recursion crash, where multiple + strings are joined using the addition operator. With a + sufficient number of such strings, astroid will fail + with a maximum recursion limit exceeded. The + function will return a Const node with all the strings + already joined. + Return ``None`` if no AST node can be obtained + through optimization. + """ + ast_nodes = [] + current = node + while isinstance(current, _ast.BinOp): + # lhs must be a BinOp with the addition operand. + if not isinstance(current.left, _ast.BinOp): + return + if (not isinstance(current.left.op, _ast.Add) + or not isinstance(current.op, _ast.Add)): + return + + # rhs must a str / bytes. + if not isinstance(current.right, _TYPES): + return + + ast_nodes.append(current.right.s) + current = current.left + + if (isinstance(current, _ast.BinOp) + and isinstance(current.left, _TYPES) + and isinstance(current.right, _TYPES)): + # Stop early if we are at the last BinOp in + # the operation + ast_nodes.append(current.right.s) + ast_nodes.append(current.left.s) + break + + if not ast_nodes: + return + + # If we have inconsistent types, bail out. 
+ known = type(ast_nodes[0]) + if any(type(element) is not known + for element in ast_nodes[1:]): + return + + value = known().join(reversed(ast_nodes)) + newnode = nodes.Const(value) + return newnode diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py b/pymode/libs/astroid/bases.py similarity index 89% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py rename to pymode/libs/astroid/bases.py index 5ee11b3b..ee8ee1c3 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/bases.py +++ b/pymode/libs/astroid/bases.py @@ -24,6 +24,8 @@ import sys from contextlib import contextmanager +from logilab.common.decorators import cachedproperty + from astroid.exceptions import (InferenceError, AstroidError, NotFoundError, UnresolvableName, UseInferenceDefault) @@ -57,30 +59,37 @@ def infer(self, context=None): # Inference ################################################################## class InferenceContext(object): - __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode') + __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'infered') - def __init__(self, path=None): - if path is None: - self.path = set() - else: - self.path = path + def __init__(self, path=None, infered=None): + self.path = path or set() self.lookupname = None self.callcontext = None self.boundnode = None + self.infered = infered or {} def push(self, node): name = self.lookupname if (node, name) in self.path: raise StopIteration() - self.path.add( (node, name) ) + self.path.add((node, name)) def clone(self): # XXX copy lookupname/callcontext ? 
- clone = InferenceContext(self.path) + clone = InferenceContext(self.path, infered=self.infered) clone.callcontext = self.callcontext clone.boundnode = self.boundnode return clone + def cache_generator(self, key, generator): + results = [] + for result in generator: + results.append(result) + yield result + + self.infered[key] = tuple(results) + return + @contextmanager def restore_path(self): path = set(self.path) @@ -170,14 +179,19 @@ def getattr(self, name, context=None, lookupclass=True): def igetattr(self, name, context=None): """inferred getattr""" + if not context: + context = InferenceContext() try: # avoid recursively inferring the same attr on the same class - if context: - context.push((self._proxied, name)) + + context.push((self._proxied, name)) # XXX frame should be self._proxied, or not ? get_attr = self.getattr(name, context, lookupclass=False) - return _infer_stmts(self._wrap_attr(get_attr, context), context, - frame=self) + return _infer_stmts( + self._wrap_attr(get_attr, context), + context, + frame=self, + ) except NotFoundError: try: # fallback to class'igetattr since it has some logic to handle @@ -259,14 +273,14 @@ def infer_call_result(self, caller, context): # instance of the class given as first argument. 
if (self._proxied.name == '__new__' and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS): - return ((x is YES and x or Instance(x)) - for x in caller.args[0].infer()) + infer = caller.args[0].infer() if caller.args else [] + return ((x is YES and x or Instance(x)) for x in infer) return self._proxied.infer_call_result(caller, context) class BoundMethod(UnboundMethod): """a special node representing a method bound to an instance""" - def __init__(self, proxy, bound): + def __init__(self, proxy, bound): UnboundMethod.__init__(self, proxy) self.bound = bound @@ -377,7 +391,16 @@ def infer(self, context=None, **kwargs): return self._explicit_inference(self, context, **kwargs) except UseInferenceDefault: pass - return self._infer(context, **kwargs) + + if not context: + return self._infer(context, **kwargs) + + key = (self, context.lookupname, + context.callcontext, context.boundnode) + if key in context.infered: + return iter(context.infered[key]) + + return context.cache_generator(key, self._infer(context, **kwargs)) def _repr_name(self): """return self.name or self.attrname or '' for nice representation""" @@ -387,15 +410,14 @@ def __str__(self): return '%s(%s)' % (self.__class__.__name__, self._repr_name()) def __repr__(self): - return '<%s(%s) l.%s [%s] at Ox%x>' % (self.__class__.__name__, - self._repr_name(), - self.fromlineno, - self.root().name, - id(self)) + return '<%s(%s) l.%s [%s] at 0x%x>' % (self.__class__.__name__, + self._repr_name(), + self.fromlineno, + self.root().name, + id(self)) def accept(self, visitor): - klass = self.__class__.__name__ func = getattr(visitor, "visit_" + self.__class__.__name__.lower()) return func(self) @@ -416,7 +438,7 @@ def last_child(self): attr = getattr(self, field) if not attr: # None or empty listy / tuple continue - if isinstance(attr, (list, tuple)): + if attr.__class__ in (list, tuple): return attr[-1] else: return attr @@ -507,16 +529,28 @@ def nearest(self, nodes): # FIXME: raise an exception if 
nearest is None ? return nearest[0] - def set_line_info(self, lastchild): + # these are lazy because they're relatively expensive to compute for every + # single node, and they rarely get looked at + + @cachedproperty + def fromlineno(self): if self.lineno is None: - self.fromlineno = self._fixed_source_line() + return self._fixed_source_line() else: - self.fromlineno = self.lineno + return self.lineno + + @cachedproperty + def tolineno(self): + if not self._astroid_fields: + # can't have children + lastchild = None + else: + lastchild = self.last_child() if lastchild is None: - self.tolineno = self.fromlineno + return self.fromlineno else: - self.tolineno = lastchild.tolineno - return + return lastchild.tolineno + # TODO / FIXME: assert self.fromlineno is not None, self assert self.tolineno is not None, self @@ -531,7 +565,7 @@ def _fixed_source_line(self): _node = self try: while line is None: - _node = _node.get_children().next() + _node = next(_node.get_children()) line = _node.lineno except StopIteration: _node = self.parent diff --git a/pymode/libs/astroid/brain/builtin_inference.py b/pymode/libs/astroid/brain/builtin_inference.py new file mode 100644 index 00000000..f60e7913 --- /dev/null +++ b/pymode/libs/astroid/brain/builtin_inference.py @@ -0,0 +1,245 @@ +"""Astroid hooks for various builtins.""" + +import sys +from functools import partial +from textwrap import dedent + +import six +from astroid import (MANAGER, UseInferenceDefault, + inference_tip, YES, InferenceError, UnresolvableName) +from astroid import nodes +from astroid.builder import AstroidBuilder + + +def _extend_str(class_node, rvalue): + """function to extend builtin str/unicode class""" + # TODO(cpopa): this approach will make astroid to believe + # that some arguments can be passed by keyword, but + # unfortunately, strings and bytes don't accept keyword arguments. 
+ code = dedent(''' + class whatever(object): + def join(self, iterable): + return {rvalue} + def replace(self, old, new, count=None): + return {rvalue} + def format(self, *args, **kwargs): + return {rvalue} + def encode(self, encoding='ascii', errors=None): + return '' + def decode(self, encoding='ascii', errors=None): + return u'' + def capitalize(self): + return {rvalue} + def title(self): + return {rvalue} + def lower(self): + return {rvalue} + def upper(self): + return {rvalue} + def swapcase(self): + return {rvalue} + def index(self, sub, start=None, end=None): + return 0 + def find(self, sub, start=None, end=None): + return 0 + def count(self, sub, start=None, end=None): + return 0 + def strip(self, chars=None): + return {rvalue} + def lstrip(self, chars=None): + return {rvalue} + def rstrip(self, chars=None): + return {rvalue} + def rjust(self, width, fillchar=None): + return {rvalue} + def center(self, width, fillchar=None): + return {rvalue} + def ljust(self, width, fillchar=None): + return {rvalue} + ''') + code = code.format(rvalue=rvalue) + fake = AstroidBuilder(MANAGER).string_build(code)['whatever'] + for method in fake.mymethods(): + class_node.locals[method.name] = [method] + method.parent = class_node + +def extend_builtins(class_transforms): + from astroid.bases import BUILTINS + builtin_ast = MANAGER.astroid_cache[BUILTINS] + for class_name, transform in class_transforms.items(): + transform(builtin_ast[class_name]) + +if sys.version_info > (3, 0): + extend_builtins({'bytes': partial(_extend_str, rvalue="b''"), + 'str': partial(_extend_str, rvalue="''")}) +else: + extend_builtins({'str': partial(_extend_str, rvalue="''"), + 'unicode': partial(_extend_str, rvalue="u''")}) + + +def register_builtin_transform(transform, builtin_name): + """Register a new transform function for the given *builtin_name*. + + The transform function must accept two parameters, a node and + an optional context. 
+ """ + def _transform_wrapper(node, context=None): + result = transform(node, context=context) + if result: + result.parent = node + result.lineno = node.lineno + result.col_offset = node.col_offset + return iter([result]) + + MANAGER.register_transform(nodes.CallFunc, + inference_tip(_transform_wrapper), + lambda n: (isinstance(n.func, nodes.Name) and + n.func.name == builtin_name)) + + +def _generic_inference(node, context, node_type, transform): + args = node.args + if not args: + return node_type() + if len(node.args) > 1: + raise UseInferenceDefault() + + arg, = args + transformed = transform(arg) + if not transformed: + try: + infered = next(arg.infer(context=context)) + except (InferenceError, StopIteration): + raise UseInferenceDefault() + if infered is YES: + raise UseInferenceDefault() + transformed = transform(infered) + if not transformed or transformed is YES: + raise UseInferenceDefault() + return transformed + + +def _generic_transform(arg, klass, iterables, build_elts): + if isinstance(arg, klass): + return arg + elif isinstance(arg, iterables): + if not all(isinstance(elt, nodes.Const) + for elt in arg.elts): + # TODO(cpopa): Don't support heterogenous elements. + # Not yet, though. 
+ raise UseInferenceDefault() + elts = [elt.value for elt in arg.elts] + elif isinstance(arg, nodes.Dict): + if not all(isinstance(elt[0], nodes.Const) + for elt in arg.items): + raise UseInferenceDefault() + elts = [item[0].value for item in arg.items] + elif (isinstance(arg, nodes.Const) and + isinstance(arg.value, (six.string_types, six.binary_type))): + elts = arg.value + else: + return + return klass(elts=build_elts(elts)) + + +def _infer_builtin(node, context, + klass=None, iterables=None, + build_elts=None): + transform_func = partial( + _generic_transform, + klass=klass, + iterables=iterables, + build_elts=build_elts) + + return _generic_inference(node, context, klass, transform_func) + +# pylint: disable=invalid-name +infer_tuple = partial( + _infer_builtin, + klass=nodes.Tuple, + iterables=(nodes.List, nodes.Set), + build_elts=tuple) + +infer_list = partial( + _infer_builtin, + klass=nodes.List, + iterables=(nodes.Tuple, nodes.Set), + build_elts=list) + +infer_set = partial( + _infer_builtin, + klass=nodes.Set, + iterables=(nodes.List, nodes.Tuple), + build_elts=set) + + +def _get_elts(arg, context): + is_iterable = lambda n: isinstance(n, + (nodes.List, nodes.Tuple, nodes.Set)) + try: + infered = next(arg.infer(context)) + except (InferenceError, UnresolvableName): + raise UseInferenceDefault() + if isinstance(infered, nodes.Dict): + items = infered.items + elif is_iterable(infered): + items = [] + for elt in infered.elts: + # If an item is not a pair of two items, + # then fallback to the default inference. + # Also, take in consideration only hashable items, + # tuples and consts. We are choosing Names as well. 
+ if not is_iterable(elt): + raise UseInferenceDefault() + if len(elt.elts) != 2: + raise UseInferenceDefault() + if not isinstance(elt.elts[0], + (nodes.Tuple, nodes.Const, nodes.Name)): + raise UseInferenceDefault() + items.append(tuple(elt.elts)) + else: + raise UseInferenceDefault() + return items + +def infer_dict(node, context=None): + """Try to infer a dict call to a Dict node. + + The function treats the following cases: + + * dict() + * dict(mapping) + * dict(iterable) + * dict(iterable, **kwargs) + * dict(mapping, **kwargs) + * dict(**kwargs) + + If a case can't be infered, we'll fallback to default inference. + """ + has_keywords = lambda args: all(isinstance(arg, nodes.Keyword) + for arg in args) + if not node.args and not node.kwargs: + # dict() + return nodes.Dict() + elif has_keywords(node.args) and node.args: + # dict(a=1, b=2, c=4) + items = [(nodes.Const(arg.arg), arg.value) for arg in node.args] + elif (len(node.args) >= 2 and + has_keywords(node.args[1:])): + # dict(some_iterable, b=2, c=4) + elts = _get_elts(node.args[0], context) + keys = [(nodes.Const(arg.arg), arg.value) for arg in node.args[1:]] + items = elts + keys + elif len(node.args) == 1: + items = _get_elts(node.args[0], context) + else: + raise UseInferenceDefault() + + empty = nodes.Dict() + empty.items = items + return empty + +# Builtins inference +register_builtin_transform(infer_tuple, 'tuple') +register_builtin_transform(infer_set, 'set') +register_builtin_transform(infer_list, 'list') +register_builtin_transform(infer_dict, 'dict') diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py b/pymode/libs/astroid/brain/py2gi.py similarity index 75% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py rename to pymode/libs/astroid/brain/py2gi.py index dd9868db..6747898d 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2gi.py +++ b/pymode/libs/astroid/brain/py2gi.py @@ -4,6 +4,7 @@ """ import inspect +import itertools import 
sys import re @@ -111,40 +112,33 @@ def _gi_build_stub(parent): return ret -# Overwrite Module.module_import to _actually_ import the introspected module if -# it's a gi module, then build stub code by examining its info and get an astng -# from that - -from astroid.scoped_nodes import Module -_orig_import_module = Module.import_module - -def _new_import_module(self, modname, relative_only=False, level=None): - # Could be a static piece of gi.repository or whatever unrelated module, - # let that fall through - try: - return _orig_import_module(self, modname, relative_only, level) - except AstroidBuildingException: - # we only consider gi.repository submodules - if not modname.startswith('gi.repository.'): - if relative_only and level is None: - level = 0 - modname = self.relative_to_absolute_name(modname, level) - if not modname.startswith('gi.repository.'): - raise +def _import_gi_module(modname): + # we only consider gi.repository submodules + if not modname.startswith('gi.repository.'): + raise AstroidBuildingException() # build astroid representation unless we already tried so if modname not in _inspected_modules: modnames = [modname] - # GLib and GObject have some special case handling - # in pygobject that we need to cope with + optional_modnames = [] + + # GLib and GObject may have some special case handling + # in pygobject that we need to cope with. However at + # least as of pygobject3-3.13.91 the _glib module doesn't + # exist anymore, so if treat these modules as optional. 
if modname == 'gi.repository.GLib': - modnames.append('gi._glib') + optional_modnames.append('gi._glib') elif modname == 'gi.repository.GObject': - modnames.append('gi._gobject') + optional_modnames.append('gi._gobject') + try: modcode = '' - for m in modnames: - __import__(m) - modcode += _gi_build_stub(sys.modules[m]) + for m in itertools.chain(modnames, optional_modnames): + try: + __import__(m) + modcode += _gi_build_stub(sys.modules[m]) + except ImportError: + if m not in optional_modnames: + raise except ImportError: astng = _inspected_modules[modname] = None else: @@ -156,4 +150,6 @@ def _new_import_module(self, modname, relative_only=False, level=None): raise AstroidBuildingException('Failed to import module %r' % modname) return astng -Module.import_module = _new_import_module + +MANAGER.register_failed_import_hook(_import_gi_module) + diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2mechanize.py b/pymode/libs/astroid/brain/py2mechanize.py similarity index 53% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2mechanize.py rename to pymode/libs/astroid/brain/py2mechanize.py index 1e0b102d..20a253a4 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2mechanize.py +++ b/pymode/libs/astroid/brain/py2mechanize.py @@ -1,8 +1,8 @@ -from astroid import MANAGER +from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder -def mechanize_transform(module): - fake = AstroidBuilder(MANAGER).string_build(''' +def mechanize_transform(): + return AstroidBuilder(MANAGER).string_build(''' class Browser(object): def open(self, url, data=None, timeout=None): @@ -13,8 +13,6 @@ def open_local_file(self, filename): return None ''') - module.locals['Browser'] = fake.locals['Browser'] -import py2stdlib -py2stdlib.MODULE_TRANSFORMS['mechanize'] = mechanize_transform +register_module_extender(MANAGER, 'mechanize', mechanize_transform) diff --git a/pymode/libs/astroid/brain/py2pytest.py 
b/pymode/libs/astroid/brain/py2pytest.py new file mode 100644 index 00000000..e24d449c --- /dev/null +++ b/pymode/libs/astroid/brain/py2pytest.py @@ -0,0 +1,31 @@ +"""Astroid hooks for pytest.""" + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + + +def pytest_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +try: + import _pytest.mark + import _pytest.recwarn + import _pytest.runner + import _pytest.python +except ImportError: + pass +else: + deprecated_call = _pytest.recwarn.deprecated_call + exit = _pytest.runner.exit + fail = _pytest.runner.fail + fixture = _pytest.python.fixture + importorskip = _pytest.runner.importorskip + mark = _pytest.mark.MarkGenerator() + raises = _pytest.python.raises + skip = _pytest.runner.skip + yield_fixture = _pytest.python.yield_fixture + +''') + +register_module_extender(MANAGER, 'pytest', pytest_transform) +register_module_extender(MANAGER, 'py.test', pytest_transform) diff --git a/pymode/libs/astroid/brain/py2qt4.py b/pymode/libs/astroid/brain/py2qt4.py new file mode 100644 index 00000000..d5578097 --- /dev/null +++ b/pymode/libs/astroid/brain/py2qt4.py @@ -0,0 +1,22 @@ +"""Astroid hooks for the Python 2 qt4 module. + +Currently help understanding of : + +* PyQT4.QtCore +""" + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + + +def pyqt4_qtcore_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +def SIGNAL(signal_name): pass + +class QObject(object): + def emit(self, signal): pass +''') + + +register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform) diff --git a/pymode/libs/astroid/brain/py2stdlib.py b/pymode/libs/astroid/brain/py2stdlib.py new file mode 100644 index 00000000..2bfcbcd3 --- /dev/null +++ b/pymode/libs/astroid/brain/py2stdlib.py @@ -0,0 +1,334 @@ + +"""Astroid hooks for the Python 2 standard library. 
+ +Currently help understanding of : + +* hashlib.md5 and hashlib.sha1 +""" + +import sys +from functools import partial +from textwrap import dedent + +from astroid import ( + MANAGER, AsStringRegexpPredicate, + UseInferenceDefault, inference_tip, + YES, InferenceError, register_module_extender) +from astroid import exceptions +from astroid import nodes +from astroid.builder import AstroidBuilder + +PY3K = sys.version_info > (3, 0) +PY33 = sys.version_info >= (3, 3) + +# general function + +def infer_func_form(node, base_type, context=None, enum=False): + """Specific inference function for namedtuple or Python 3 enum. """ + def infer_first(node): + try: + value = next(node.infer(context=context)) + if value is YES: + raise UseInferenceDefault() + else: + return value + except StopIteration: + raise InferenceError() + + # node is a CallFunc node, class name as first argument and generated class + # attributes as second argument + if len(node.args) != 2: + # something weird here, go back to class implementation + raise UseInferenceDefault() + # namedtuple or enums list of attributes can be a list of strings or a + # whitespace-separate string + try: + name = infer_first(node.args[0]).value + names = infer_first(node.args[1]) + try: + attributes = names.value.replace(',', ' ').split() + except AttributeError: + if not enum: + attributes = [infer_first(const).value for const in names.elts] + else: + # Enums supports either iterator of (name, value) pairs + # or mappings. + # TODO: support only list, tuples and mappings. + if hasattr(names, 'items') and isinstance(names.items, list): + attributes = [infer_first(const[0]).value + for const in names.items + if isinstance(const[0], nodes.Const)] + elif hasattr(names, 'elts'): + # Enums can support either ["a", "b", "c"] + # or [("a", 1), ("b", 2), ...], but they can't + # be mixed. 
+ if all(isinstance(const, nodes.Tuple) + for const in names.elts): + attributes = [infer_first(const.elts[0]).value + for const in names.elts + if isinstance(const, nodes.Tuple)] + else: + attributes = [infer_first(const).value + for const in names.elts] + else: + raise AttributeError + if not attributes: + raise AttributeError + except (AttributeError, exceptions.InferenceError) as exc: + raise UseInferenceDefault() + # we want to return a Class node instance with proper attributes set + class_node = nodes.Class(name, 'docstring') + class_node.parent = node.parent + # set base class=tuple + class_node.bases.append(base_type) + # XXX add __init__(*attributes) method + for attr in attributes: + fake_node = nodes.EmptyNode() + fake_node.parent = class_node + class_node.instance_attrs[attr] = [fake_node] + return class_node, name, attributes + + +# module specific transformation functions ##################################### + +def hashlib_transform(): + template = ''' + +class %(name)s(object): + def __init__(self, value=''): pass + def digest(self): + return %(digest)s + def copy(self): + return self + def update(self, value): pass + def hexdigest(self): + return '' + @property + def name(self): + return %(name)r + @property + def block_size(self): + return 1 + @property + def digest_size(self): + return 1 +''' + algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') + classes = "".join( + template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'} + for hashfunc in algorithms) + return AstroidBuilder(MANAGER).string_build(classes) + + +def collections_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +class defaultdict(dict): + default_factory = None + def __missing__(self, key): pass + +class deque(object): + maxlen = 0 + def __init__(self, iterable=None, maxlen=None): pass + def append(self, x): pass + def appendleft(self, x): pass + def clear(self): pass + def count(self, x): return 0 + def extend(self, iterable): pass + def 
extendleft(self, iterable): pass + def pop(self): pass + def popleft(self): pass + def remove(self, value): pass + def reverse(self): pass + def rotate(self, n): pass + def __iter__(self): return self + +''') + + +def pkg_resources_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +def resource_exists(package_or_requirement, resource_name): + pass + +def resource_isdir(package_or_requirement, resource_name): + pass + +def resource_filename(package_or_requirement, resource_name): + pass + +def resource_stream(package_or_requirement, resource_name): + pass + +def resource_string(package_or_requirement, resource_name): + pass + +def resource_listdir(package_or_requirement, resource_name): + pass + +def extraction_error(): + pass + +def get_cache_path(archive_name, names=()): + pass + +def postprocess(tempname, filename): + pass + +def set_extraction_path(path): + pass + +def cleanup_resources(force=False): + pass + +''') + + +def subprocess_transform(): + if PY3K: + communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0, restore_signals=True, + start_new_session=False, pass_fds=()): + pass + """ + else: + communicate = ('string', 'string') + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + pass + """ + if PY33: + wait_signature = 'def wait(self, timeout=None)' + else: + wait_signature = 'def wait(self)' + return AstroidBuilder(MANAGER).string_build(''' + +class Popen(object): + returncode = pid = 0 + stdin = stdout = stderr = file() + + %(init)s + + def communicate(self, input=None): + return %(communicate)r + 
%(wait_signature)s: + return self.returncode + def poll(self): + return self.returncode + def send_signal(self, signal): + pass + def terminate(self): + pass + def kill(self): + pass + ''' % {'init': init, + 'communicate': communicate, + 'wait_signature': wait_signature}) + + +# namedtuple support ########################################################### + +def looks_like_namedtuple(node): + func = node.func + if type(func) is nodes.Getattr: + return func.attrname == 'namedtuple' + if type(func) is nodes.Name: + return func.name == 'namedtuple' + return False + +def infer_named_tuple(node, context=None): + """Specific inference function for namedtuple CallFunc node""" + class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied, + context=context) + fake = AstroidBuilder(MANAGER).string_build(''' +class %(name)s(tuple): + _fields = %(fields)r + def _asdict(self): + return self.__dict__ + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + return new(cls, iterable) + def _replace(_self, **kwds): + result = _self._make(map(kwds.pop, %(fields)r, _self)) + if kwds: + raise ValueError('Got unexpected field names: %%r' %% list(kwds)) + return result + ''' % {'name': name, 'fields': attributes}) + class_node.locals['_asdict'] = fake.body[0].locals['_asdict'] + class_node.locals['_make'] = fake.body[0].locals['_make'] + class_node.locals['_replace'] = fake.body[0].locals['_replace'] + class_node.locals['_fields'] = fake.body[0].locals['_fields'] + # we use UseInferenceDefault, we can't be a generator so return an iterator + return iter([class_node]) + +def infer_enum(node, context=None): + """ Specific inference function for enum CallFunc node. """ + enum_meta = nodes.Class("EnumMeta", 'docstring') + class_node = infer_func_form(node, enum_meta, + context=context, enum=True)[0] + return iter([class_node.instanciate_class()]) + +def infer_enum_class(node): + """ Specific inference for enums. 
""" + names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum')) + for basename in node.basenames: + # TODO: doesn't handle subclasses yet. This implementation + # is a hack to support enums. + if basename not in names: + continue + if node.root().name == 'enum': + # Skip if the class is directly from enum module. + break + for local, values in node.locals.items(): + if any(not isinstance(value, nodes.AssName) + for value in values): + continue + + stmt = values[0].statement() + if isinstance(stmt.targets[0], nodes.Tuple): + targets = stmt.targets[0].itered() + else: + targets = stmt.targets + + new_targets = [] + for target in targets: + # Replace all the assignments with our mocked class. + classdef = dedent(''' + class %(name)s(object): + @property + def value(self): + # Not the best return. + return None + @property + def name(self): + return %(name)r + ''' % {'name': target.name}) + fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] + fake.parent = target.parent + for method in node.mymethods(): + fake.locals[method.name] = [method] + new_targets.append(fake.instanciate_class()) + node.locals[local] = new_targets + break + return node + + +MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple), + looks_like_namedtuple) +MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum), + AsStringRegexpPredicate('Enum', 'func')) +MANAGER.register_transform(nodes.Class, infer_enum_class) +register_module_extender(MANAGER, 'hashlib', hashlib_transform) +register_module_extender(MANAGER, 'collections', collections_transform) +register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform) +register_module_extender(MANAGER, 'subprocess', subprocess_transform) diff --git a/pymode/libs/astroid/brain/pynose.py b/pymode/libs/astroid/brain/pynose.py new file mode 100644 index 00000000..67a6fb8f --- /dev/null +++ b/pymode/libs/astroid/brain/pynose.py @@ -0,0 +1,79 @@ +# copyright 2003-2015 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +"""Hooks for nose library.""" + +import re +import textwrap + +import astroid +import astroid.builder + +_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER) + + +def _pep8(name, caps=re.compile('([A-Z])')): + return caps.sub(lambda m: '_' + m.groups()[0].lower(), name) + + +def _nose_tools_functions(): + """Get an iterator of names and bound methods.""" + module = _BUILDER.string_build(textwrap.dedent(''' + import unittest + + class Test(unittest.TestCase): + pass + a = Test() + ''')) + try: + case = next(module['a'].infer()) + except astroid.InferenceError: + return + for method in case.methods(): + if method.name.startswith('assert') and '_' not in method.name: + pep8_name = _pep8(method.name) + yield pep8_name, astroid.BoundMethod(method, case) + + +def _nose_tools_transform(node): + for method_name, method in _nose_tools_functions(): + node.locals[method_name] = [method] + + +def _nose_tools_trivial_transform(): + """Custom transform for the nose.tools module.""" + stub = _BUILDER.string_build('''__all__ = []''') + all_entries = ['ok_', 'eq_'] + + for pep8_name, method in _nose_tools_functions(): + all_entries.append(pep8_name) + stub[pep8_name] = method + + # Update the __all__ variable, since 
nose.tools + # does this manually with .append. + all_assign = stub['__all__'].parent + all_object = astroid.List(all_entries) + all_object.parent = all_assign + all_assign.value = all_object + return stub + + +astroid.register_module_extender(astroid.MANAGER, 'nose.tools.trivial', + _nose_tools_trivial_transform) +astroid.MANAGER.register_transform(astroid.Module, _nose_tools_transform, + lambda n: n.name == 'nose.tools') diff --git a/pymode/libs/astroid/brain/pysix_moves.py b/pymode/libs/astroid/brain/pysix_moves.py new file mode 100644 index 00000000..548d9761 --- /dev/null +++ b/pymode/libs/astroid/brain/pysix_moves.py @@ -0,0 +1,261 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# astroid is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +"""Astroid hooks for six.moves.""" + +import sys +from textwrap import dedent + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder +from astroid.exceptions import AstroidBuildingException + +def _indent(text, prefix, predicate=None): + """Adds 'prefix' to the beginning of selected lines in 'text'. + + If 'predicate' is provided, 'prefix' will only be added to the lines + where 'predicate(line)' is True. 
If 'predicate' is not provided, + it will default to adding 'prefix' to all non-empty lines that do not + consist solely of whitespace characters. + """ + if predicate is None: + predicate = lambda line: line.strip() + + def prefixed_lines(): + for line in text.splitlines(True): + yield prefix + line if predicate(line) else line + return ''.join(prefixed_lines()) + + +if sys.version_info[0] == 2: + _IMPORTS_2 = """ + import BaseHTTPServer + import CGIHTTPServer + import SimpleHTTPServer + + from StringIO import StringIO + from cStringIO import StringIO as cStringIO + from UserDict import UserDict + from UserList import UserList + from UserString import UserString + + import __builtin__ as builtins + import thread as _thread + import dummy_thread as _dummy_thread + import ConfigParser as configparser + import copy_reg as copyreg + from itertools import (imap as map, + ifilter as filter, + ifilterfalse as filterfalse, + izip_longest as zip_longest, + izip as zip) + import htmlentitydefs as html_entities + import HTMLParser as html_parser + import httplib as http_client + import cookielib as http_cookiejar + import Cookie as http_cookies + import Queue as queue + import repr as reprlib + from pipes import quote as shlex_quote + import SocketServer as socketserver + import SimpleXMLRPCServer as xmlrpc_server + import xmlrpclib as xmlrpc_client + import _winreg as winreg + import robotparser as urllib_robotparser + import Tkinter as tkinter + import tkFileDialog as tkinter_tkfiledialog + + input = raw_input + intern = intern + range = xrange + xrange = xrange + reduce = reduce + reload_module = reload + + class UrllibParse(object): + import urlparse as _urlparse + import urllib as _urllib + ParseResult = _urlparse.ParseResult + SplitResult = _urlparse.SplitResult + parse_qs = _urlparse.parse_qs + parse_qsl = _urlparse.parse_qsl + urldefrag = _urlparse.urldefrag + urljoin = _urlparse.urljoin + urlparse = _urlparse.urlparse + urlsplit = _urlparse.urlsplit + urlunparse = 
_urlparse.urlunparse + urlunsplit = _urlparse.urlunsplit + quote = _urllib.quote + quote_plus = _urllib.quote_plus + unquote = _urllib.unquote + unquote_plus = _urllib.unquote_plus + urlencode = _urllib.urlencode + splitquery = _urllib.splitquery + splittag = _urllib.splittag + splituser = _urllib.splituser + uses_fragment = _urlparse.uses_fragment + uses_netloc = _urlparse.uses_netloc + uses_params = _urlparse.uses_params + uses_query = _urlparse.uses_query + uses_relative = _urlparse.uses_relative + + class UrllibError(object): + import urllib2 as _urllib2 + import urllib as _urllib + URLError = _urllib2.URLError + HTTPError = _urllib2.HTTPError + ContentTooShortError = _urllib.ContentTooShortError + + class DummyModule(object): + pass + + class UrllibRequest(object): + import urlparse as _urlparse + import urllib2 as _urllib2 + import urllib as _urllib + urlopen = _urllib2.urlopen + install_opener = _urllib2.install_opener + build_opener = _urllib2.build_opener + pathname2url = _urllib.pathname2url + url2pathname = _urllib.url2pathname + getproxies = _urllib.getproxies + Request = _urllib2.Request + OpenerDirector = _urllib2.OpenerDirector + HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler + HTTPRedirectHandler = _urllib2.HTTPRedirectHandler + HTTPCookieProcessor = _urllib2.HTTPCookieProcessor + ProxyHandler = _urllib2.ProxyHandler + BaseHandler = _urllib2.BaseHandler + HTTPPasswordMgr = _urllib2.HTTPPasswordMgr + HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm + AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler + HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler + ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler + AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler + HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler + ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler + HTTPHandler = _urllib2.HTTPHandler + HTTPSHandler = _urllib2.HTTPSHandler + FileHandler = _urllib2.FileHandler + 
FTPHandler = _urllib2.FTPHandler + CacheFTPHandler = _urllib2.CacheFTPHandler + UnknownHandler = _urllib2.UnknownHandler + HTTPErrorProcessor = _urllib2.HTTPErrorProcessor + urlretrieve = _urllib.urlretrieve + urlcleanup = _urllib.urlcleanup + proxy_bypass = _urllib.proxy_bypass + + urllib_parse = UrllibParse() + urllib_error = UrllibError() + urllib = DummyModule() + urllib.request = UrllibRequest() + urllib.parse = UrllibParse() + urllib.error = UrllibError() + """ +else: + _IMPORTS_3 = """ + import _io + cStringIO = _io.StringIO + filter = filter + from itertools import filterfalse + input = input + from sys import intern + map = map + range = range + from imp import reload as reload_module + from functools import reduce + from shlex import quote as shlex_quote + from io import StringIO + from collections import UserDict, UserList, UserString + xrange = range + zip = zip + from itertools import zip_longest + import builtins + import configparser + import copyreg + import _dummy_thread + import http.cookiejar as http_cookiejar + import http.cookies as http_cookies + import html.entities as html_entities + import html.parser as html_parser + import http.client as http_client + import http.server + BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server + import pickle as cPickle + import queue + import reprlib + import socketserver + import _thread + import winreg + import xmlrpc.server as xmlrpc_server + import xmlrpc.client as xmlrpc_client + import urllib.robotparser as urllib_robotparser + import email.mime.multipart as email_mime_multipart + import email.mime.nonmultipart as email_mime_nonmultipart + import email.mime.text as email_mime_text + import email.mime.base as email_mime_base + import urllib.parse as urllib_parse + import urllib.error as urllib_error + import tkinter + import tkinter.dialog as tkinter_dialog + import tkinter.filedialog as tkinter_filedialog + import tkinter.scrolledtext as tkinter_scrolledtext + import tkinter.simpledialog as 
tkinder_simpledialog + import tkinter.tix as tkinter_tix + import tkinter.ttk as tkinter_ttk + import tkinter.constants as tkinter_constants + import tkinter.dnd as tkinter_dnd + import tkinter.colorchooser as tkinter_colorchooser + import tkinter.commondialog as tkinter_commondialog + import tkinter.filedialog as tkinter_tkfiledialog + import tkinter.font as tkinter_font + import tkinter.messagebox as tkinter_messagebox + import urllib.request + import urllib.robotparser as urllib_robotparser + import urllib.parse as urllib_parse + import urllib.error as urllib_error + """ +if sys.version_info[0] == 2: + _IMPORTS = dedent(_IMPORTS_2) +else: + _IMPORTS = dedent(_IMPORTS_3) + + +def six_moves_transform(): + code = dedent(''' + class Moves(object): + {} + moves = Moves() + ''').format(_indent(_IMPORTS, " ")) + module = AstroidBuilder(MANAGER).string_build(code) + module.name = 'six.moves' + return module + + +def _six_fail_hook(modname): + if modname != 'six.moves': + raise AstroidBuildingException + module = AstroidBuilder(MANAGER).string_build(_IMPORTS) + module.name = 'six.moves' + return module + + +register_module_extender(MANAGER, 'six', six_moves_transform) +register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six', + six_moves_transform) +MANAGER.register_failed_import_hook(_six_fail_hook) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py b/pymode/libs/astroid/builder.py similarity index 85% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py rename to pymode/libs/astroid/builder.py index b6ceff82..1fe7a36d 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/builder.py +++ b/pymode/libs/astroid/builder.py @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of astroid. 
@@ -20,19 +20,19 @@ The builder is not thread safe and can't be used to parse different sources at the same time. """ +from __future__ import with_statement __docformat__ = "restructuredtext en" import sys from os.path import splitext, basename, exists, abspath -from logilab.common.modutils import modpath_from_file - from astroid.exceptions import AstroidBuildingException, InferenceError from astroid.raw_building import InspectBuilder from astroid.rebuilder import TreeRebuilder from astroid.manager import AstroidManager from astroid.bases import YES, Instance +from astroid.modutils import modpath_from_file from _ast import PyCF_ONLY_AST def parse(string): @@ -42,13 +42,12 @@ def parse(string): from tokenize import detect_encoding def open_source_file(filename): - byte_stream = open(filename, 'bU') - encoding = detect_encoding(byte_stream.readline)[0] - byte_stream.close() - stream = open(filename, 'U', encoding=encoding) + with open(filename, 'rb') as byte_stream: + encoding = detect_encoding(byte_stream.readline)[0] + stream = open(filename, 'r', newline=None, encoding=encoding) try: data = stream.read() - except UnicodeError, uex: # wrong encodingg + except UnicodeError: # wrong encodingg # detect_encoding returns utf-8 if no encoding specified msg = 'Wrong (%s) or no encoding specified' % encoding raise AstroidBuildingException(msg) @@ -57,7 +56,7 @@ def open_source_file(filename): else: import re - _ENCODING_RGX = re.compile("\s*#+.*coding[:=]\s*([-\w.]+)") + _ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)") def _guess_encoding(string): """get encoding from a python file as string or return None if not found @@ -117,22 +116,23 @@ def file_build(self, path, modname=None): """ try: stream, encoding, data = open_source_file(path) - except IOError, exc: + except IOError as exc: msg = 'Unable to load file %r (%s)' % (path, exc) raise AstroidBuildingException(msg) - except SyntaxError, exc: # py3k encoding specification error + except SyntaxError as exc: # 
py3k encoding specification error raise AstroidBuildingException(exc) - except LookupError, exc: # unknown encoding + except LookupError as exc: # unknown encoding raise AstroidBuildingException(exc) - # get module name if necessary - if modname is None: - try: - modname = '.'.join(modpath_from_file(path)) - except ImportError: - modname = splitext(basename(path))[0] - # build astroid representation - module = self._data_build(data, modname, path) - return self._post_build(module, encoding) + with stream: + # get module name if necessary + if modname is None: + try: + modname = '.'.join(modpath_from_file(path)) + except ImportError: + modname = splitext(basename(path))[0] + # build astroid representation + module = self._data_build(data, modname, path) + return self._post_build(module, encoding) def string_build(self, data, modname='', path=None): """build astroid from source code string and return rebuilded astroid""" @@ -160,7 +160,10 @@ def _post_build(self, module, encoding): def _data_build(self, data, modname, path): """build tree node from data and add some informations""" # this method could be wrapped with a pickle/cache function - node = parse(data + '\n') + try: + node = parse(data + '\n') + except TypeError as exc: + raise AstroidBuildingException(exc) if path is not None: node_file = abspath(path) else: @@ -171,8 +174,7 @@ def _data_build(self, data, modname, path): else: package = path and path.find('__init__.py') > -1 or False rebuilder = TreeRebuilder(self._manager) - module = rebuilder.visit_module(node, modname, package) - module.file = module.path = node_file + module = rebuilder.visit_module(node, modname, node_file, package) module._from_nodes = rebuilder._from_nodes module._delayed_assattr = rebuilder._delayed_assattr return module @@ -188,8 +190,8 @@ def sort_locals(my_list): for (name, asname) in node.names: if name == '*': try: - imported = node.root().import_module(node.modname) - except AstroidBuildingException: + imported = 
node.do_import_module() + except InferenceError: continue for name in imported.wildcard_import_names(): node.parent.set_local(name, node) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/exceptions.py b/pymode/libs/astroid/exceptions.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/exceptions.py rename to pymode/libs/astroid/exceptions.py diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/inference.py b/pymode/libs/astroid/inference.py similarity index 87% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/inference.py rename to pymode/libs/astroid/inference.py index 35cce332..22807049 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/inference.py +++ b/pymode/libs/astroid/inference.py @@ -25,16 +25,19 @@ from astroid import nodes from astroid.manager import AstroidManager -from astroid.exceptions import (AstroidError, - InferenceError, NoDefault, NotFoundError, UnresolvableName) -from astroid.bases import YES, Instance, InferenceContext, \ - _infer_stmts, copy_context, path_wrapper, raise_if_nothing_infered -from astroid.protocols import _arguments_infer_argname +from astroid.exceptions import (AstroidError, InferenceError, NoDefault, + NotFoundError, UnresolvableName) +from astroid.bases import (YES, Instance, InferenceContext, + _infer_stmts, copy_context, path_wrapper, + raise_if_nothing_infered) +from astroid.protocols import ( + _arguments_infer_argname, + BIN_OP_METHOD, UNARY_OP_METHOD) MANAGER = AstroidManager() -class CallContext: +class CallContext(object): """when inferring a function call, this class is used to remember values given as argument """ @@ -141,11 +144,37 @@ def infer_end(self, context=None): nodes.Dict._infer = infer_end nodes.Set._infer = infer_end +def _higher_function_scope(node): + """ Search for the first function which encloses the given + scope. 
This can be used for looking up in that function's + scope, in case looking up in a lower scope for a particular + name fails. + + :param node: A scope node. + :returns: + ``None``, if no parent function scope was found, + otherwise an instance of :class:`astroid.scoped_nodes.Function`, + which encloses the given node. + """ + current = node + while current.parent and not isinstance(current.parent, nodes.Function): + current = current.parent + if current and current.parent: + return current.parent + def infer_name(self, context=None): """infer a Name: use name lookup rules""" frame, stmts = self.lookup(self.name) if not stmts: - raise UnresolvableName(self.name) + # Try to see if the name is enclosed in a nested function + # and use the higher (first function) scope for searching. + # TODO: should this be promoted to other nodes as well? + parent_function = _higher_function_scope(self.scope()) + if parent_function: + _, stmts = parent_function.lookup(self.name) + + if not stmts: + raise UnresolvableName(self.name) context = context.clone() context.lookupname = self.name return _infer_stmts(stmts, context, frame) @@ -197,7 +226,7 @@ def infer_from(self, context=None, asname=True): raise InferenceError() if asname: name = self.real_name(name) - module = self.do_import_module(self.modname) + module = self.do_import_module() try: context = copy_context(context) context.lookupname = name @@ -209,7 +238,6 @@ def infer_from(self, context=None, asname=True): def infer_getattr(self, context=None): """infer a Getattr node by using getattr on the associated object""" - #context = context.clone() for owner in self.expr.infer(context): if owner is YES: yield owner @@ -240,12 +268,12 @@ def infer_global(self, context=None): def infer_subscript(self, context=None): """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]""" - value = self.value.infer(context).next() + value = next(self.value.infer(context)) if value is YES: yield YES return - index = 
self.slice.infer(context).next() + index = next(self.slice.infer(context)) if index is YES: yield YES return @@ -258,6 +286,12 @@ def infer_subscript(self, context=None): except (IndexError, TypeError): yield YES return + + # Prevent inferring if the infered subscript + # is the same as the original subscripted object. + if self is assigned: + yield YES + return for infered in assigned.infer(context): yield infered else: @@ -265,13 +299,6 @@ def infer_subscript(self, context=None): nodes.Subscript._infer = path_wrapper(infer_subscript) nodes.Subscript.infer_lhs = raise_if_nothing_infered(infer_subscript) - -UNARY_OP_METHOD = {'+': '__pos__', - '-': '__neg__', - '~': '__invert__', - 'not': None, # XXX not '__nonzero__' - } - def infer_unaryop(self, context=None): for operand in self.operand.infer(context): try: @@ -294,21 +321,6 @@ def infer_unaryop(self, context=None): yield YES nodes.UnaryOp._infer = path_wrapper(infer_unaryop) - -BIN_OP_METHOD = {'+': '__add__', - '-': '__sub__', - '/': '__div__', - '//': '__floordiv__', - '*': '__mul__', - '**': '__power__', - '%': '__mod__', - '&': '__and__', - '|': '__or__', - '^': '__xor__', - '<<': '__lshift__', - '>>': '__rshift__', - } - def _infer_binop(operator, operand1, operand2, context, failures=None): if operand1 is YES: yield operand1 @@ -381,7 +393,7 @@ def infer_empty_node(self, context=None): else: try: for infered in MANAGER.infer_ast_from_something(self.object, - context=context): + context=context): yield infered except AstroidError: yield YES diff --git a/pymode/libs/astroid/inspector.py b/pymode/libs/astroid/inspector.py new file mode 100644 index 00000000..1fc31926 --- /dev/null +++ b/pymode/libs/astroid/inspector.py @@ -0,0 +1,273 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""visitor doing some postprocessing on the astroid tree. +Try to resolve definitions (namespace) dictionary, relationship... + +This module has been imported from pyreverse +""" + +__docformat__ = "restructuredtext en" + +from os.path import dirname + +import astroid +from astroid.exceptions import InferenceError +from astroid.utils import LocalsVisitor +from astroid.modutils import get_module_part, is_relative, is_standard_module + +class IdGeneratorMixIn(object): + """ + Mixin adding the ability to generate integer uid + """ + def __init__(self, start_value=0): + self.id_count = start_value + + def init_counter(self, start_value=0): + """init the id counter + """ + self.id_count = start_value + + def generate_id(self): + """generate a new identifier + """ + self.id_count += 1 + return self.id_count + + +class Linker(IdGeneratorMixIn, LocalsVisitor): + """ + walk on the project tree and resolve relationships. + + According to options the following attributes may be added to visited nodes: + + * uid, + a unique identifier for the node (on astroid.Project, astroid.Module, + astroid.Class and astroid.locals_type). Only if the linker has been instantiated + with tag=True parameter (False by default). 
+ + * Function + a mapping from locals names to their bounded value, which may be a + constant like a string or an integer, or an astroid node (on astroid.Module, + astroid.Class and astroid.Function). + + * instance_attrs_type + as locals_type but for klass member attributes (only on astroid.Class) + + * implements, + list of implemented interface _objects_ (only on astroid.Class nodes) + """ + + def __init__(self, project, inherited_interfaces=0, tag=False): + IdGeneratorMixIn.__init__(self) + LocalsVisitor.__init__(self) + # take inherited interface in consideration or not + self.inherited_interfaces = inherited_interfaces + # tag nodes or not + self.tag = tag + # visited project + self.project = project + + + def visit_project(self, node): + """visit an astroid.Project node + + * optionally tag the node with a unique id + """ + if self.tag: + node.uid = self.generate_id() + for module in node.modules: + self.visit(module) + + def visit_package(self, node): + """visit an astroid.Package node + + * optionally tag the node with a unique id + """ + if self.tag: + node.uid = self.generate_id() + for subelmt in node.values(): + self.visit(subelmt) + + def visit_module(self, node): + """visit an astroid.Module node + + * set the locals_type mapping + * set the depends mapping + * optionally tag the node with a unique id + """ + if hasattr(node, 'locals_type'): + return + node.locals_type = {} + node.depends = [] + if self.tag: + node.uid = self.generate_id() + + def visit_class(self, node): + """visit an astroid.Class node + + * set the locals_type and instance_attrs_type mappings + * set the implements list and build it + * optionally tag the node with a unique id + """ + if hasattr(node, 'locals_type'): + return + node.locals_type = {} + if self.tag: + node.uid = self.generate_id() + # resolve ancestors + for baseobj in node.ancestors(recurs=False): + specializations = getattr(baseobj, 'specializations', []) + specializations.append(node) + baseobj.specializations = 
specializations + # resolve instance attributes + node.instance_attrs_type = {} + for assattrs in node.instance_attrs.values(): + for assattr in assattrs: + self.handle_assattr_type(assattr, node) + # resolve implemented interface + try: + node.implements = list(node.interfaces(self.inherited_interfaces)) + except InferenceError: + node.implements = () + + def visit_function(self, node): + """visit an astroid.Function node + + * set the locals_type mapping + * optionally tag the node with a unique id + """ + if hasattr(node, 'locals_type'): + return + node.locals_type = {} + if self.tag: + node.uid = self.generate_id() + + link_project = visit_project + link_module = visit_module + link_class = visit_class + link_function = visit_function + + def visit_assname(self, node): + """visit an astroid.AssName node + + handle locals_type + """ + # avoid double parsing done by different Linkers.visit + # running over the same project: + if hasattr(node, '_handled'): + return + node._handled = True + if node.name in node.frame(): + frame = node.frame() + else: + # the name has been defined as 'global' in the frame and belongs + # there. 
Btw the frame is not yet visited as the name is in the + # root locals; the frame hence has no locals_type attribute + frame = node.root() + try: + values = node.infered() + try: + already_infered = frame.locals_type[node.name] + for valnode in values: + if not valnode in already_infered: + already_infered.append(valnode) + except KeyError: + frame.locals_type[node.name] = values + except astroid.InferenceError: + pass + + def handle_assattr_type(self, node, parent): + """handle an astroid.AssAttr node + + handle instance_attrs_type + """ + try: + values = list(node.infer()) + try: + already_infered = parent.instance_attrs_type[node.attrname] + for valnode in values: + if not valnode in already_infered: + already_infered.append(valnode) + except KeyError: + parent.instance_attrs_type[node.attrname] = values + except astroid.InferenceError: + pass + + def visit_import(self, node): + """visit an astroid.Import node + + resolve module dependencies + """ + context_file = node.root().file + for name in node.names: + relative = is_relative(name[0], context_file) + self._imported_module(node, name[0], relative) + + + def visit_from(self, node): + """visit an astroid.From node + + resolve module dependencies + """ + basename = node.modname + context_file = node.root().file + if context_file is not None: + relative = is_relative(basename, context_file) + else: + relative = False + for name in node.names: + if name[0] == '*': + continue + # analyze dependencies + fullname = '%s.%s' % (basename, name[0]) + if fullname.find('.') > -1: + try: + # XXX: don't use get_module_part, missing package precedence + fullname = get_module_part(fullname, context_file) + except ImportError: + continue + if fullname != basename: + self._imported_module(node, fullname, relative) + + + def compute_module(self, context_name, mod_path): + """return true if the module should be added to dependencies""" + package_dir = dirname(self.project.path) + if context_name == mod_path: + return 0 + elif 
is_standard_module(mod_path, (package_dir,)): + return 1 + return 0 + + # protected methods ######################################################## + + def _imported_module(self, node, mod_path, relative): + """notify an imported module, used to analyze dependencies + """ + module = node.root() + context_name = module.name + if relative: + mod_path = '%s.%s' % ('.'.join(context_name.split('.')[:-1]), + mod_path) + if self.compute_module(context_name, mod_path): + # handle dependencies + if not hasattr(module, 'depends'): + module.depends = [] + mod_paths = module.depends + if not mod_path in mod_paths: + mod_paths.append(mod_path) diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py b/pymode/libs/astroid/manager.py similarity index 73% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py rename to pymode/libs/astroid/manager.py index 058e845e..b1fb3058 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/manager.py +++ b/pymode/libs/astroid/manager.py @@ -19,27 +19,31 @@ possible by providing a class responsible to get astroid representation from various source and using a cache of built modules) """ +from __future__ import print_function __docformat__ = "restructuredtext en" +import collections +import imp import os from os.path import dirname, join, isdir, exists +from warnings import warn +import zipimport -from logilab.common.modutils import NoSourceFile, is_python_source, \ - file_from_modpath, load_module_from_name, modpath_from_file, \ - get_module_files, get_source_file, zipimport from logilab.common.configuration import OptionsProviderMixIn from astroid.exceptions import AstroidBuildingException +from astroid import modutils + def astroid_wrapper(func, modname): """wrapper to give to AstroidManager.project_from_files""" - print 'parsing %s...' % modname + print('parsing %s...' 
% modname) try: return func(modname) - except AstroidBuildingException, exc: - print exc - except Exception, exc: + except AstroidBuildingException as exc: + print(exc) + except Exception as exc: import traceback traceback.print_exc() @@ -73,7 +77,7 @@ class AstroidManager(OptionsProviderMixIn): {'default': "No Name", 'type' : 'string', 'short': 'p', 'metavar' : '', 'help' : 'set the project name.'}), - ) + ) brain = {} def __init__(self): self.__dict__ = AstroidManager.brain @@ -83,18 +87,22 @@ def __init__(self): # NOTE: cache entries are added by the [re]builder self.astroid_cache = {} self._mod_file_cache = {} - self.transforms = {} + self.transforms = collections.defaultdict(list) + self._failed_import_hooks = [] + self.always_load_extensions = False + self.optimize_ast = False + self.extension_package_whitelist = set() def ast_from_file(self, filepath, modname=None, fallback=True, source=False): """given a module name, return the astroid object""" try: - filepath = get_source_file(filepath, include_no_ext=True) + filepath = modutils.get_source_file(filepath, include_no_ext=True) source = True - except NoSourceFile: + except modutils.NoSourceFile: pass if modname is None: try: - modname = '.'.join(modpath_from_file(filepath)) + modname = '.'.join(modutils.modpath_from_file(filepath)) except ImportError: modname = filepath if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath: @@ -105,32 +113,58 @@ def ast_from_file(self, filepath, modname=None, fallback=True, source=False): elif fallback and modname: return self.ast_from_module_name(modname) raise AstroidBuildingException('unable to get astroid for file %s' % - filepath) + filepath) + + def _build_stub_module(self, modname): + from astroid.builder import AstroidBuilder + return AstroidBuilder(self).string_build('', modname) + + def _can_load_extension(self, modname): + if self.always_load_extensions: + return True + if modutils.is_standard_module(modname): + return True + parts = 
modname.split('.') + return any( + '.'.join(parts[:x]) in self.extension_package_whitelist + for x in range(1, len(parts) + 1)) def ast_from_module_name(self, modname, context_file=None): """given a module name, return the astroid object""" if modname in self.astroid_cache: return self.astroid_cache[modname] if modname == '__main__': - from astroid.builder import AstroidBuilder - return AstroidBuilder(self).string_build('', modname) + return self._build_stub_module(modname) old_cwd = os.getcwd() if context_file: os.chdir(dirname(context_file)) try: - filepath = self.file_from_module_name(modname, context_file) - if filepath is not None and not is_python_source(filepath): + filepath, mp_type = self.file_from_module_name(modname, context_file) + if mp_type == modutils.PY_ZIPMODULE: module = self.zip_import_data(filepath) if module is not None: return module - if filepath is None or not is_python_source(filepath): + elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION): + if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname): + return self._build_stub_module(modname) try: - module = load_module_from_name(modname) - except Exception, ex: + module = modutils.load_module_from_name(modname) + except Exception as ex: msg = 'Unable to load module %s (%s)' % (modname, ex) raise AstroidBuildingException(msg) return self.ast_from_module(module, modname) + elif mp_type == imp.PY_COMPILED: + raise AstroidBuildingException("Unable to load compiled module %s" % (modname,)) + if filepath is None: + raise AstroidBuildingException("Unable to load module %s" % (modname,)) return self.ast_from_file(filepath, modname, fallback=False) + except AstroidBuildingException as e: + for hook in self._failed_import_hooks: + try: + return hook(modname) + except AstroidBuildingException: + pass + raise e finally: os.chdir(old_cwd) @@ -141,14 +175,14 @@ def zip_import_data(self, filepath): builder = AstroidBuilder(self) for ext in ('.zip', '.egg'): try: - eggpath, resource = 
filepath.rsplit(ext + '/', 1) + eggpath, resource = filepath.rsplit(ext + os.path.sep, 1) except ValueError: continue try: importer = zipimport.zipimporter(eggpath + ext) - zmodname = resource.replace('/', '.') + zmodname = resource.replace(os.path.sep, '.') if importer.is_package(resource): - zmodname = zmodname + '.__init__' + zmodname = zmodname + '.__init__' module = builder.string_build(importer.get_source(resource), zmodname, filepath) return module @@ -161,9 +195,9 @@ def file_from_module_name(self, modname, contextfile): value = self._mod_file_cache[(modname, contextfile)] except KeyError: try: - value = file_from_modpath(modname.split('.'), - context_file=contextfile) - except ImportError, ex: + value = modutils.file_info_from_modpath( + modname.split('.'), context_file=contextfile) + except ImportError as ex: msg = 'Unable to load module %s (%s)' % (modname, ex) value = AstroidBuildingException(msg) self._mod_file_cache[(modname, contextfile)] = value @@ -179,7 +213,7 @@ def ast_from_module(self, module, modname=None): try: # some builtin modules don't have __file__ attribute filepath = module.__file__ - if is_python_source(filepath): + if modutils.is_python_source(filepath): return self.ast_from_file(filepath, modname) except AttributeError: pass @@ -209,7 +243,7 @@ def infer_ast_from_something(self, obj, context=None): except AttributeError: raise AstroidBuildingException( 'Unable to get module for %s' % safe_repr(klass)) - except Exception, ex: + except Exception as ex: raise AstroidBuildingException( 'Unexpected error while retrieving module for %s: %s' % (safe_repr(klass), ex)) @@ -218,7 +252,7 @@ def infer_ast_from_something(self, obj, context=None): except AttributeError: raise AstroidBuildingException( 'Unable to get name for %s' % safe_repr(klass)) - except Exception, ex: + except Exception as ex: raise AstroidBuildingException( 'Unexpected error while retrieving name for %s: %s' % (safe_repr(klass), ex)) @@ -240,7 +274,7 @@ def 
project_from_files(self, files, func_wrapper=astroid_wrapper, project = Project(project_name) for something in files: if not exists(something): - fpath = file_from_modpath(something.split('.')) + fpath = modutils.file_from_modpath(something.split('.')) elif isdir(something): fpath = join(something, '__init__.py') else: @@ -255,8 +289,8 @@ def project_from_files(self, files, func_wrapper=astroid_wrapper, # recurse in package except if __init__ was explicitly given if astroid.package and something.find('__init__') == -1: # recurse on others packages / modules if this is a package - for fpath in get_module_files(dirname(astroid.file), - black_list): + for fpath in modutils.get_module_files(dirname(astroid.file), + black_list): astroid = func_wrapper(self.ast_from_file, fpath) if astroid is None or astroid.name == base_name: continue @@ -265,17 +299,27 @@ def project_from_files(self, files, func_wrapper=astroid_wrapper, def register_transform(self, node_class, transform, predicate=None): """Register `transform(node)` function to be applied on the given - Astroid's `node_class` if `predicate` is None or return a true value + Astroid's `node_class` if `predicate` is None or returns true when called with the node as argument. The transform function may return a value which is then used to substitute the original node in the tree. """ - self.transforms.setdefault(node_class, []).append( (transform, predicate) ) + self.transforms[node_class].append((transform, predicate)) def unregister_transform(self, node_class, transform, predicate=None): """Unregister the given transform.""" - self.transforms[node_class].remove( (transform, predicate) ) + self.transforms[node_class].remove((transform, predicate)) + + def register_failed_import_hook(self, hook): + """Registers a hook to resolve imports that cannot be found otherwise. + + `hook` must be a function that accepts a single argument `modname` which + contains the name of the module or package that could not be imported. 
+ If `hook` can resolve the import, must return a node of type `astroid.Module`, + otherwise, it must raise `AstroidBuildingException`. + """ + self._failed_import_hooks.append(hook) def transform(self, node): """Call matching transforms for the given node if any and return the @@ -297,7 +341,7 @@ def transform(self, node): if node is not orig_node: # node has already be modified by some previous # transformation, warn about it - warn('node %s substitued multiple times' % node) + warn('node %s substituted multiple times' % node) node = ret return node @@ -305,6 +349,17 @@ def cache_module(self, module): """Cache a module if no module with the same name is known yet.""" self.astroid_cache.setdefault(module.name, module) + def clear_cache(self, astroid_builtin=None): + # XXX clear transforms + self.astroid_cache.clear() + # force bootstrap again, else we may ends up with cache inconsistency + # between the manager and CONST_PROXY, making + # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the + # test order + import astroid.raw_building + astroid.raw_building._astroid_bootstrapping( + astroid_builtin=astroid_builtin) + class Project(object): """a project handle a set of modules / packages""" diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/mixins.py b/pymode/libs/astroid/mixins.py similarity index 92% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/mixins.py rename to pymode/libs/astroid/mixins.py index 5e7b7878..dbf1673a 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/mixins.py +++ b/pymode/libs/astroid/mixins.py @@ -18,16 +18,18 @@ """This module contains some mixins for the different nodes. 
""" +from logilab.common.decorators import cachedproperty + from astroid.exceptions import (AstroidBuildingException, InferenceError, - NotFoundError) + NotFoundError) class BlockRangeMixIn(object): """override block range """ - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - self.tolineno = lastchild.tolineno - self.blockstart_tolineno = self._blockstart_toline() + + @cachedproperty + def blockstart_tolineno(self): + return self.lineno def _elsed_block_range(self, lineno, orelse, last=None): """handle block line numbers range for try/finally, for, if and while @@ -85,7 +87,7 @@ class FromImportMixIn(FilterStmtsMixin): def _infer_name(self, frame, name): return name - def do_import_module(self, modname): + def do_import_module(self, modname=None): """return the ast for a module whose name is imported by """ # handle special case where we are on a package node importing a module @@ -94,6 +96,8 @@ def do_import_module(self, modname): # XXX: no more needed ? mymodule = self.root() level = getattr(self, 'level', None) # Import as no level + if modname is None: + modname = self.modname # XXX we should investigate deeper if we really want to check # importing itself: modname and mymodule.name be relative or absolute if mymodule.relative_to_absolute_name(modname, level) == mymodule.name: @@ -103,7 +107,7 @@ def do_import_module(self, modname): return mymodule.import_module(modname, level=level) except AstroidBuildingException: raise InferenceError(modname) - except SyntaxError, ex: + except SyntaxError as ex: raise InferenceError(str(ex)) def real_name(self, asname): @@ -118,5 +122,3 @@ def real_name(self, asname): return name raise NotFoundError(asname) - - diff --git a/pymode/libs/astroid/modutils.py b/pymode/libs/astroid/modutils.py new file mode 100644 index 00000000..c547f3e6 --- /dev/null +++ b/pymode/libs/astroid/modutils.py @@ -0,0 +1,670 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# astroid is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""Python modules manipulation utility functions. + +:type PY_SOURCE_EXTS: tuple(str) +:var PY_SOURCE_EXTS: list of possible python source file extension + +:type STD_LIB_DIRS: set of str +:var STD_LIB_DIRS: directories where standard modules are located + +:type BUILTIN_MODULES: dict +:var BUILTIN_MODULES: dictionary with builtin module names has key +""" +from __future__ import with_statement + +__docformat__ = "restructuredtext en" + +import imp +import os +import sys +from distutils.sysconfig import get_python_lib +from distutils.errors import DistutilsPlatformError +import zipimport + +try: + import pkg_resources +except ImportError: + pkg_resources = None + +from logilab.common import _handle_blacklist + +PY_ZIPMODULE = object() + +if sys.platform.startswith('win'): + PY_SOURCE_EXTS = ('py', 'pyw') + PY_COMPILED_EXTS = ('dll', 'pyd') +else: + PY_SOURCE_EXTS = ('py',) + PY_COMPILED_EXTS = ('so',) + +# Notes about STD_LIB_DIRS +# Consider arch-specific installation for STD_LIB_DIRS definition +# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on +# +# :see: `Problems with /usr/lib64 builds `_ +# :see: `FHS `_ +try: + # The explicit sys.prefix is to work around a patch in virtualenv that + # replaces the 'real' sys.prefix (i.e. 
the location of the binary) + # with the prefix from which the virtualenv was created. This throws + # off the detection logic for standard library modules, thus the + # workaround. + STD_LIB_DIRS = set([ + get_python_lib(standard_lib=True, prefix=sys.prefix), + # Take care of installations where exec_prefix != prefix. + get_python_lib(standard_lib=True, prefix=sys.exec_prefix), + get_python_lib(standard_lib=True)]) + if os.name == 'nt': + STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls')) + try: + # real_prefix is defined when running inside virtualenv. + STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls')) + except AttributeError: + pass +# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to +# non-valid path, see https://bugs.pypy.org/issue1164 +except DistutilsPlatformError: + STD_LIB_DIRS = set() + +EXT_LIB_DIR = get_python_lib() + +BUILTIN_MODULES = dict(zip(sys.builtin_module_names, + [1]*len(sys.builtin_module_names))) + + +class NoSourceFile(Exception): + """exception raised when we are not able to get a python + source file for a precompiled file + """ + +def _normalize_path(path): + return os.path.normcase(os.path.abspath(path)) + + +_NORM_PATH_CACHE = {} + +def _cache_normalize_path(path): + """abspath with caching""" + # _module_file calls abspath on every path in sys.path every time it's + # called; on a larger codebase this easily adds up to half a second just + # assembling path components. This cache alleviates that. + try: + return _NORM_PATH_CACHE[path] + except KeyError: + if not path: # don't cache result for '' + return _normalize_path(path) + result = _NORM_PATH_CACHE[path] = _normalize_path(path) + return result + +def load_module_from_name(dotted_name, path=None, use_sys=1): + """Load a Python module from its name. 
+ + :type dotted_name: str + :param dotted_name: python name of a module or package + + :type path: list or None + :param path: + optional list of path where the module or package should be + searched (use sys.path if nothing or None is given) + + :type use_sys: bool + :param use_sys: + boolean indicating whether the sys.modules dictionary should be + used or not + + + :raise ImportError: if the module or package is not found + + :rtype: module + :return: the loaded module + """ + return load_module_from_modpath(dotted_name.split('.'), path, use_sys) + + +def load_module_from_modpath(parts, path=None, use_sys=1): + """Load a python module from its splitted name. + + :type parts: list(str) or tuple(str) + :param parts: + python name of a module or package splitted on '.' + + :type path: list or None + :param path: + optional list of path where the module or package should be + searched (use sys.path if nothing or None is given) + + :type use_sys: bool + :param use_sys: + boolean indicating whether the sys.modules dictionary should be used or not + + :raise ImportError: if the module or package is not found + + :rtype: module + :return: the loaded module + """ + if use_sys: + try: + return sys.modules['.'.join(parts)] + except KeyError: + pass + modpath = [] + prevmodule = None + for part in parts: + modpath.append(part) + curname = '.'.join(modpath) + module = None + if len(modpath) != len(parts): + # even with use_sys=False, should try to get outer packages from sys.modules + module = sys.modules.get(curname) + elif use_sys: + # because it may have been indirectly loaded through a parent + module = sys.modules.get(curname) + if module is None: + mp_file, mp_filename, mp_desc = imp.find_module(part, path) + module = imp.load_module(curname, mp_file, mp_filename, mp_desc) + # mp_file still needs to be closed. 
+ if mp_file: + mp_file.close() + if prevmodule: + setattr(prevmodule, part, module) + _file = getattr(module, '__file__', '') + if not _file and len(modpath) != len(parts): + raise ImportError('no module in %s' % '.'.join(parts[len(modpath):])) + path = [os.path.dirname(_file)] + prevmodule = module + return module + + +def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None): + """Load a Python module from it's path. + + :type filepath: str + :param filepath: path to the python module or package + + :type path: list or None + :param path: + optional list of path where the module or package should be + searched (use sys.path if nothing or None is given) + + :type use_sys: bool + :param use_sys: + boolean indicating whether the sys.modules dictionary should be + used or not + + + :raise ImportError: if the module or package is not found + + :rtype: module + :return: the loaded module + """ + modpath = modpath_from_file(filepath, extrapath) + return load_module_from_modpath(modpath, path, use_sys) + + +def _check_init(path, mod_path): + """check there are some __init__.py all along the way""" + for part in mod_path: + path = os.path.join(path, part) + if not _has_init(path): + return False + return True + + +def modpath_from_file(filename, extrapath=None): + """given a file path return the corresponding splitted module's name + (i.e name of a module or package splitted on '.') + + :type filename: str + :param filename: file's path for which we want the module's name + + :type extrapath: dict + :param extrapath: + optional extra search path, with path as key and package name for the path + as value. This is usually useful to handle package splitted in multiple + directories using __path__ trick. 
+ + + :raise ImportError: + if the corresponding module's name has not been found + + :rtype: list(str) + :return: the corresponding splitted module's name + """ + base = os.path.splitext(os.path.abspath(filename))[0] + if extrapath is not None: + for path_ in extrapath: + path = os.path.abspath(path_) + if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path): + submodpath = [pkg for pkg in base[len(path):].split(os.sep) + if pkg] + if _check_init(path, submodpath[:-1]): + return extrapath[path_].split('.') + submodpath + for path in sys.path: + path = _cache_normalize_path(path) + if path and os.path.normcase(base).startswith(path): + modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg] + if _check_init(path, modpath[:-1]): + return modpath + raise ImportError('Unable to find module for %s in %s' % ( + filename, ', \n'.join(sys.path))) + + +def file_from_modpath(modpath, path=None, context_file=None): + return file_info_from_modpath(modpath, path, context_file)[0] + +def file_info_from_modpath(modpath, path=None, context_file=None): + """given a mod path (i.e. splitted module / package name), return the + corresponding file, giving priority to source file over precompiled + file if it exists + + :type modpath: list or tuple + :param modpath: + splitted module's name (i.e name of a module or package splitted + on '.') + (this means explicit relative imports that start with dots have + empty strings in this list!) + + :type path: list or None + :param path: + optional list of path where the module or package should be + searched (use sys.path if nothing or None is given) + + :type context_file: str or None + :param context_file: + context file to consider, necessary if the identifier has been + introduced using a relative import unresolvable in the actual + context (i.e. 
modutils) + + :raise ImportError: if there is no such module in the directory + + :rtype: (str or None, import type) + :return: + the path to the module's file or None if it's an integrated + builtin module such as 'sys' + """ + if context_file is not None: + context = os.path.dirname(context_file) + else: + context = context_file + if modpath[0] == 'xml': + # handle _xmlplus + try: + return _file_from_modpath(['_xmlplus'] + modpath[1:], path, context) + except ImportError: + return _file_from_modpath(modpath, path, context) + elif modpath == ['os', 'path']: + # FIXME: currently ignoring search_path... + return os.path.__file__, imp.PY_SOURCE + return _file_from_modpath(modpath, path, context) + + +def get_module_part(dotted_name, context_file=None): + """given a dotted name return the module part of the name : + + >>> get_module_part('logilab.common.modutils.get_module_part') + 'logilab.common.modutils' + + :type dotted_name: str + :param dotted_name: full name of the identifier we are interested in + + :type context_file: str or None + :param context_file: + context file to consider, necessary if the identifier has been + introduced using a relative import unresolvable in the actual + context (i.e. modutils) + + + :raise ImportError: if there is no such module in the directory + + :rtype: str or None + :return: + the module part of the name or None if we have not been able at + all to import the given name + + XXX: deprecated, since it doesn't handle package precedence over module + (see #10066) + """ + # os.path trick + if dotted_name.startswith('os.path'): + return 'os.path' + parts = dotted_name.split('.') + if context_file is not None: + # first check for builtin module which won't be considered latter + # in that case (path != None) + if parts[0] in BUILTIN_MODULES: + if len(parts) > 2: + raise ImportError(dotted_name) + return parts[0] + # don't use += or insert, we want a new list to be created ! 
+ path = None + starti = 0 + if parts[0] == '': + assert context_file is not None, \ + 'explicit relative import, but no context_file?' + path = [] # prevent resolving the import non-relatively + starti = 1 + while parts[starti] == '': # for all further dots: change context + starti += 1 + context_file = os.path.dirname(context_file) + for i in range(starti, len(parts)): + try: + file_from_modpath(parts[starti:i+1], path=path, + context_file=context_file) + except ImportError: + if not i >= max(1, len(parts) - 2): + raise + return '.'.join(parts[:i]) + return dotted_name + + +def get_module_files(src_directory, blacklist): + """given a package directory return a list of all available python + module's files in the package and its subpackages + + :type src_directory: str + :param src_directory: + path of the directory corresponding to the package + + :type blacklist: list or tuple + :param blacklist: + optional list of files or directory to ignore, default to the value of + `logilab.common.STD_BLACKLIST` + + :rtype: list + :return: + the list of all available python module's files in the package and + its subpackages + """ + files = [] + for directory, dirnames, filenames in os.walk(src_directory): + _handle_blacklist(blacklist, dirnames, filenames) + # check for __init__.py + if not '__init__.py' in filenames: + dirnames[:] = () + continue + for filename in filenames: + if _is_python_file(filename): + src = os.path.join(directory, filename) + files.append(src) + return files + + +def get_source_file(filename, include_no_ext=False): + """given a python module's file name return the matching source file + name (the filename will be returned identically if it's a already an + absolute path to a python source file...) 
+ + :type filename: str + :param filename: python module's file name + + + :raise NoSourceFile: if no source file exists on the file system + + :rtype: str + :return: the absolute path of the source file if it exists + """ + base, orig_ext = os.path.splitext(os.path.abspath(filename)) + for ext in PY_SOURCE_EXTS: + source_path = '%s.%s' % (base, ext) + if os.path.exists(source_path): + return source_path + if include_no_ext and not orig_ext and os.path.exists(base): + return base + raise NoSourceFile(filename) + + +def is_python_source(filename): + """ + rtype: bool + return: True if the filename is a python source file + """ + return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS + + +def is_standard_module(modname, std_path=None): + """try to guess if a module is a standard python module (by default, + see `std_path` parameter's description) + + :type modname: str + :param modname: name of the module we are interested in + + :type std_path: list(str) or tuple(str) + :param std_path: list of path considered has standard + + + :rtype: bool + :return: + true if the module: + - is located on the path listed in one of the directory in `std_path` + - is a built-in module + """ + modname = modname.split('.')[0] + try: + filename = file_from_modpath([modname]) + except ImportError: + # import failed, i'm probably not so wrong by supposing it's + # not standard... 
+ return False + # modules which are not living in a file are considered standard + # (sys and __builtin__ for instance) + if filename is None: + return True + filename = _normalize_path(filename) + if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)): + return False + if std_path is None: + std_path = STD_LIB_DIRS + for path in std_path: + if filename.startswith(_cache_normalize_path(path)): + return True + return False + + + +def is_relative(modname, from_file): + """return true if the given module name is relative to the given + file name + + :type modname: str + :param modname: name of the module we are interested in + + :type from_file: str + :param from_file: + path of the module from which modname has been imported + + :rtype: bool + :return: + true if the module has been imported relatively to `from_file` + """ + if not os.path.isdir(from_file): + from_file = os.path.dirname(from_file) + if from_file in sys.path: + return False + try: + stream, _, _ = imp.find_module(modname.split('.')[0], [from_file]) + + # Close the stream to avoid ResourceWarnings. + if stream: + stream.close() + return True + except ImportError: + return False + + +# internal only functions ##################################################### + +def _file_from_modpath(modpath, path=None, context=None): + """given a mod path (i.e. 
splitted module / package name), return the + corresponding file + + this function is used internally, see `file_from_modpath`'s + documentation for more information + """ + assert len(modpath) > 0 + if context is not None: + try: + mtype, mp_filename = _module_file(modpath, [context]) + except ImportError: + mtype, mp_filename = _module_file(modpath, path) + else: + mtype, mp_filename = _module_file(modpath, path) + if mtype == imp.PY_COMPILED: + try: + return get_source_file(mp_filename), imp.PY_SOURCE + except NoSourceFile: + return mp_filename, imp.PY_COMPILED + elif mtype == imp.C_BUILTIN: + # integrated builtin module + return None, imp.C_BUILTIN + elif mtype == imp.PKG_DIRECTORY: + mp_filename = _has_init(mp_filename) + mtype = imp.PY_SOURCE + return mp_filename, mtype + +def _search_zip(modpath, pic): + for filepath, importer in pic.items(): + if importer is not None: + if importer.find_module(modpath[0]): + if not importer.find_module(os.path.sep.join(modpath)): + raise ImportError('No module named %s in %s/%s' % ( + '.'.join(modpath[1:]), filepath, modpath)) + return PY_ZIPMODULE, os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath + raise ImportError('No module named %s' % '.'.join(modpath)) + + +def _module_file(modpath, path=None): + """get a module type / file path + + :type modpath: list or tuple + :param modpath: + splitted module's name (i.e name of a module or package splitted + on '.'), with leading empty strings for explicit relative import + + :type path: list or None + :param path: + optional list of path where the module or package should be + searched (use sys.path if nothing or None is given) + + + :rtype: tuple(int, str) + :return: the module type flag and the file path for a module + """ + # egg support compat + try: + pic = sys.path_importer_cache + _path = (path is None and sys.path or path) + for __path in _path: + if not __path in pic: + try: + pic[__path] = zipimport.zipimporter(__path) + except 
zipimport.ZipImportError: + pic[__path] = None + checkeggs = True + except AttributeError: + checkeggs = False + # pkg_resources support (aka setuptools namespace packages) + if (pkg_resources is not None + and modpath[0] in pkg_resources._namespace_packages + and modpath[0] in sys.modules + and len(modpath) > 1): + # setuptools has added into sys.modules a module object with proper + # __path__, get back information from there + module = sys.modules[modpath.pop(0)] + path = module.__path__ + imported = [] + while modpath: + modname = modpath[0] + # take care to changes in find_module implementation wrt builtin modules + # + # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23) + # >>> imp.find_module('posix') + # (None, 'posix', ('', '', 6)) + # + # Python 3.3.1 (default, Apr 26 2013, 12:08:46) + # >>> imp.find_module('posix') + # (None, None, ('', '', 6)) + try: + stream, mp_filename, mp_desc = imp.find_module(modname, path) + except ImportError: + if checkeggs: + return _search_zip(modpath, pic)[:2] + raise + else: + # Don't forget to close the stream to avoid + # spurious ResourceWarnings. 
+ if stream: + stream.close() + + if checkeggs and mp_filename: + fullabspath = [_cache_normalize_path(x) for x in _path] + try: + pathindex = fullabspath.index(os.path.dirname(_normalize_path(mp_filename))) + emtype, emp_filename, zippath = _search_zip(modpath, pic) + if pathindex > _path.index(zippath): + # an egg takes priority + return emtype, emp_filename + except ValueError: + # XXX not in _path + pass + except ImportError: + pass + checkeggs = False + imported.append(modpath.pop(0)) + mtype = mp_desc[2] + if modpath: + if mtype != imp.PKG_DIRECTORY: + raise ImportError('No module %s in %s' % ('.'.join(modpath), + '.'.join(imported))) + # XXX guess if package is using pkgutil.extend_path by looking for + # those keywords in the first four Kbytes + try: + with open(os.path.join(mp_filename, '__init__.py'), 'rb') as stream: + data = stream.read(4096) + except IOError: + path = [mp_filename] + else: + if b'pkgutil' in data and b'extend_path' in data: + # extend_path is called, search sys.path for module/packages + # of this name see pkgutil.extend_path documentation + path = [os.path.join(p, *imported) for p in sys.path + if os.path.isdir(os.path.join(p, *imported))] + else: + path = [mp_filename] + return mtype, mp_filename + +def _is_python_file(filename): + """return true if the given filename should be considered as a python file + + .pyc and .pyo are ignored + """ + for ext in ('.py', '.so', '.pyd', '.pyw'): + if filename.endswith(ext): + return True + return False + + +def _has_init(directory): + """if the given directory has a valid __init__ file, return its path, + else return None + """ + mod_or_pack = os.path.join(directory, '__init__') + for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'): + if os.path.exists(mod_or_pack + '.' + ext): + return mod_or_pack + '.' 
+ ext + return None diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/node_classes.py b/pymode/libs/astroid/node_classes.py similarity index 88% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/node_classes.py rename to pymode/libs/astroid/node_classes.py index 01dc8d92..4b413ef8 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/node_classes.py +++ b/pymode/libs/astroid/node_classes.py @@ -20,11 +20,16 @@ import sys +import six +from logilab.common.decorators import cachedproperty + from astroid.exceptions import NoDefault from astroid.bases import (NodeNG, Statement, Instance, InferenceContext, - _infer_stmts, YES, BUILTINS) -from astroid.mixins import BlockRangeMixIn, AssignTypeMixin, \ - ParentAssignTypeMixin, FromImportMixIn + _infer_stmts, YES, BUILTINS) +from astroid.mixins import (BlockRangeMixIn, AssignTypeMixin, + ParentAssignTypeMixin, FromImportMixIn) + +PY3K = sys.version_info >= (3, 0) def unpack_infer(stmt, context=None): @@ -37,7 +42,7 @@ def unpack_infer(stmt, context=None): yield infered_elt return # if infered is a final node, return it and stop - infered = stmt.infer(context).next() + infered = next(stmt.infer(context)) if infered is stmt: yield infered return @@ -82,16 +87,16 @@ def are_exclusive(stmt1, stmt2, exceptions=None): # nodes are in exclusive branches if isinstance(node, If) and exceptions is None: if (node.locate_child(previous)[1] - is not node.locate_child(children[node])[1]): + is not node.locate_child(children[node])[1]): return True elif isinstance(node, TryExcept): c2attr, c2node = node.locate_child(previous) c1attr, c1node = node.locate_child(children[node]) if c1node is not c2node: if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or - (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or - (c2attr == 'handlers' and c1attr == 'orelse') or - (c2attr == 'orelse' and c1attr == 'handlers')): + (c2attr == 'handlers' and c1attr == 'body' and 
previous.catch(exceptions)) or + (c2attr == 'handlers' and c1attr == 'orelse') or + (c2attr == 'orelse' and c1attr == 'handlers')): return True elif c2attr == 'handlers' and c1attr == 'handlers': return previous is not children[node] @@ -108,13 +113,13 @@ class LookupMixIn(object): def lookup(self, name): """lookup a variable name - return the scope node and the list of assignments associated to the given - name according to the scope where it has been found (locals, globals or - builtin) + return the scope node and the list of assignments associated to the + given name according to the scope where it has been found (locals, + globals or builtin) - The lookup is starting from self's scope. If self is not a frame itself and - the name is found in the inner frame locals, statements will be filtered - to remove ignorable statements according to self's location + The lookup is starting from self's scope. If self is not a frame itself + and the name is found in the inner frame locals, statements will be + filtered to remove ignorable statements according to self's location """ return self.scope().scope_lookup(self, name) @@ -144,6 +149,20 @@ def _filter_stmts(self, stmts, frame, offset): myframe = self.frame().parent.frame() else: myframe = self.frame() + # If the frame of this node is the same as the statement + # of this node, then the node is part of a class or + # a function definition and the frame of this node should be the + # the upper frame, not the frame of the definition. + # For more information why this is important, + # see Pylint issue #295. + # For example, for 'b', the statement is the same + # as the frame / scope: + # + # def test(b=1): + # ... 
+ + if self.statement() is myframe and myframe.parent: + myframe = myframe.parent.frame() if not myframe is frame or self is frame: return stmts mystmt = self.statement() @@ -253,7 +272,26 @@ class Name(LookupMixIn, NodeNG): class Arguments(NodeNG, AssignTypeMixin): """class representing an Arguments node""" - _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults') + if PY3K: + # Python 3.4+ uses a different approach regarding annotations, + # each argument is a new class, _ast.arg, which exposes an + # 'annotation' attribute. In astroid though, arguments are exposed + # as is in the Arguments node and the only way to expose annotations + # is by using something similar with Python 3.3: + # - we expose 'varargannotation' and 'kwargannotation' of annotations + # of varargs and kwargs. + # - we expose 'annotation', a list with annotations for + # for each normal argument. If an argument doesn't have an + # annotation, its value will be None. + + _astroid_fields = ('args', 'defaults', 'kwonlyargs', + 'kw_defaults', 'annotations', + 'varargannotation', 'kwargannotation') + annotations = None + varargannotation = None + kwargannotation = None + else: + _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults') args = None defaults = None kwonlyargs = None @@ -268,6 +306,11 @@ def _infer_name(self, frame, name): return name return None + @cachedproperty + def fromlineno(self): + lineno = super(Arguments, self).fromlineno + return max(lineno, self.parent.fromlineno or 0) + def format_args(self): """return arguments formatted as string""" result = [] @@ -423,7 +466,7 @@ def last_child(self): class Comprehension(NodeNG): """class representing a Comprehension node""" - _astroid_fields = ('target', 'iter' ,'ifs') + _astroid_fields = ('target', 'iter', 'ifs') target = None iter = None ifs = None @@ -454,7 +497,7 @@ def __init__(self, value=None): self.value = value def getitem(self, index, context=None): - if isinstance(self.value, basestring): + if 
isinstance(self.value, six.string_types): return Const(self.value[index]) raise TypeError('%r (value=%s)' % (self, self.value)) @@ -462,7 +505,7 @@ def has_dynamic_getattr(self): return False def itered(self): - if isinstance(self.value, basestring): + if isinstance(self.value, six.string_types): return self.value raise TypeError() @@ -507,7 +550,7 @@ def __init__(self, items=None): self.items = [] else: self.items = [(const_factory(k), const_factory(v)) - for k,v in items.iteritems()] + for k, v in items.items()] def pytype(self): return '%s.dict' % BUILTINS @@ -533,7 +576,8 @@ def getitem(self, lookup_key, context=None): for inferedkey in key.infer(context): if inferedkey is YES: continue - if isinstance(inferedkey, Const) and inferedkey.value == lookup_key: + if isinstance(inferedkey, Const) \ + and inferedkey.value == lookup_key: return value # This should raise KeyError, but all call sites only catch # IndexError. Let's leave it like that for now. @@ -561,7 +605,8 @@ class ExceptHandler(Statement, AssignTypeMixin): name = None body = None - def _blockstart_toline(self): + @cachedproperty + def blockstart_tolineno(self): if self.name: return self.name.tolineno elif self.type: @@ -569,11 +614,6 @@ def _blockstart_toline(self): else: return self.lineno - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - self.tolineno = lastchild.tolineno - self.blockstart_tolineno = self._blockstart_toline() - def catch(self, exceptions): if self.type is None or exceptions is None: return True @@ -604,14 +644,15 @@ class For(BlockRangeMixIn, AssignTypeMixin, Statement): orelse = None optional_assign = True - def _blockstart_toline(self): + @cachedproperty + def blockstart_tolineno(self): return self.iter.tolineno class From(FromImportMixIn, Statement): """class representing a From node""" - def __init__(self, fromname, names, level=0): + def __init__(self, fromname, names, level=0): self.modname = fromname self.names = names self.level = level @@ -639,7 +680,8 
@@ class If(BlockRangeMixIn, Statement): body = None orelse = None - def _blockstart_toline(self): + @cachedproperty + def blockstart_tolineno(self): return self.test.tolineno def block_range(self, lineno): @@ -790,9 +832,6 @@ class TryExcept(BlockRangeMixIn, Statement): def _infer_name(self, frame, name): return name - def _blockstart_toline(self): - return self.lineno - def block_range(self, lineno): """handle block line numbers range for try/except statements""" last = None @@ -812,15 +851,12 @@ class TryFinally(BlockRangeMixIn, Statement): body = None finalbody = None - def _blockstart_toline(self): - return self.lineno - def block_range(self, lineno): """handle block line numbers range for try/finally statements""" child = self.body[0] # py2.5 try: except: finally: if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno - and lineno > self.fromlineno and lineno <= child.tolineno): + and lineno > self.fromlineno and lineno <= child.tolineno): return child.block_range(lineno) return self._elsed_block_range(lineno, self.finalbody) @@ -858,7 +894,8 @@ class While(BlockRangeMixIn, Statement): body = None orelse = None - def _blockstart_toline(self): + @cachedproperty + def blockstart_tolineno(self): return self.test.tolineno def block_range(self, lineno): @@ -872,7 +909,8 @@ class With(BlockRangeMixIn, AssignTypeMixin, Statement): items = None body = None - def _blockstart_toline(self): + @cachedproperty + def blockstart_tolineno(self): return self.items[-1][0].tolineno def get_children(self): @@ -889,7 +927,7 @@ class Yield(NodeNG): value = None class YieldFrom(Yield): - """ Class representing a YieldFrom node. """ + """ Class representing a YieldFrom node. 
""" # constants ############################################################## diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/nodes.py b/pymode/libs/astroid/nodes.py similarity index 98% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/nodes.py rename to pymode/libs/astroid/nodes.py index 263ab476..67c2f8e8 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/nodes.py +++ b/pymode/libs/astroid/nodes.py @@ -34,6 +34,7 @@ """ +# pylint: disable=unused-import __docformat__ = "restructuredtext en" diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/protocols.py b/pymode/libs/astroid/protocols.py similarity index 75% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/protocols.py rename to pymode/libs/astroid/protocols.py index e66b802c..4c11f9cf 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/protocols.py +++ b/pymode/libs/astroid/protocols.py @@ -20,14 +20,35 @@ """ __doctype__ = "restructuredtext en" +import collections -from astroid.exceptions import InferenceError, NoDefault +from astroid.exceptions import InferenceError, NoDefault, NotFoundError from astroid.node_classes import unpack_infer -from astroid.bases import copy_context, \ +from astroid.bases import InferenceContext, copy_context, \ raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES from astroid.nodes import const_factory from astroid import nodes +BIN_OP_METHOD = {'+': '__add__', + '-': '__sub__', + '/': '__div__', + '//': '__floordiv__', + '*': '__mul__', + '**': '__power__', + '%': '__mod__', + '&': '__and__', + '|': '__or__', + '^': '__xor__', + '<<': '__lshift__', + '>>': '__rshift__', + } + +UNARY_OP_METHOD = {'+': '__pos__', + '-': '__neg__', + '~': '__invert__', + 'not': None, # XXX not '__nonzero__' + } + # unary operations ############################################################ def tl_infer_unary_op(self, operator): @@ -70,8 +91,8 @@ def const_infer_unary_op(self, operator): '^': lambda a, b: a ^ b, '<<': lambda a, b: a << b, 
'>>': lambda a, b: a >> b, - } -for key, impl in BIN_OP_IMPL.items(): + } +for key, impl in list(BIN_OP_IMPL.items()): BIN_OP_IMPL[key+'='] = impl def const_infer_binary_op(self, operator, other, context): @@ -133,6 +154,25 @@ def dict_infer_binary_op(self, operator, other, context): # XXX else log TypeError nodes.Dict.infer_binary_op = yes_if_nothing_infered(dict_infer_binary_op) +def instance_infer_binary_op(self, operator, other, context): + try: + methods = self.getattr(BIN_OP_METHOD[operator]) + except (NotFoundError, KeyError): + # Unknown operator + yield YES + else: + for method in methods: + if not isinstance(method, nodes.Function): + continue + for result in method.infer_call_result(self, context): + if result is not YES: + yield result + # We are interested only in the first infered method, + # don't go looking in the rest of the methods of the ancestors. + break + +Instance.infer_binary_op = yes_if_nothing_infered(instance_infer_binary_op) + # assignment ################################################################## @@ -166,7 +206,7 @@ def _resolve_looppart(parts, asspath, context): assigned = stmt.getitem(index, context) except (AttributeError, IndexError): continue - except TypeError, exc: # stmt is unsubscriptable Const + except TypeError: # stmt is unsubscriptable Const continue if not asspath: # we achieved to resolved the assignment path, @@ -231,10 +271,14 @@ def _arguments_infer_argname(self, name, context): yield self.parent.parent.frame() return if name == self.vararg: - yield const_factory(()) + vararg = const_factory(()) + vararg.parent = self + yield vararg return if name == self.kwarg: - yield const_factory({}) + kwarg = const_factory({}) + kwarg.parent = self + yield kwarg return # if there is a default value, yield it. 
And then yield YES to reflect # we can't guess given argument value @@ -253,11 +297,8 @@ def arguments_assigned_stmts(self, node, context, asspath=None): callcontext = context.callcontext context = copy_context(context) context.callcontext = None - for infered in callcontext.infer_argument(self.parent, node.name, context): - yield infered - return - for infered in _arguments_infer_argname(self, node.name, context): - yield infered + return callcontext.infer_argument(self.parent, node.name, context) + return _arguments_infer_argname(self, node.name, context) nodes.Arguments.assigned_stmts = arguments_assigned_stmts @@ -320,3 +361,55 @@ def with_assigned_stmts(self, node, context=None, asspath=None): nodes.With.assigned_stmts = raise_if_nothing_infered(with_assigned_stmts) +def starred_assigned_stmts(self, node=None, context=None, asspath=None): + stmt = self.statement() + if not isinstance(stmt, (nodes.Assign, nodes.For)): + raise InferenceError() + + if isinstance(stmt, nodes.Assign): + value = stmt.value + lhs = stmt.targets[0] + + if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1: + # Too many starred arguments in the expression. + raise InferenceError() + + if context is None: + context = InferenceContext() + try: + rhs = next(value.infer(context)) + except InferenceError: + yield YES + return + if rhs is YES or not hasattr(rhs, 'elts'): + # Not interested in inferred values without elts. + yield YES + return + + elts = collections.deque(rhs.elts[:]) + if len(lhs.elts) > len(rhs.elts): + # a, *b, c = (1, 2) + raise InferenceError() + + # Unpack iteratively the values from the rhs of the assignment, + # until the find the starred node. What will remain will + # be the list of values which the Starred node will represent + # This is done in two steps, from left to right to remove + # anything before the starred node and from right to left + # to remvoe anything after the starred node. 
+ + for index, node in enumerate(lhs.elts): + if not isinstance(node, nodes.Starred): + elts.popleft() + continue + lhs_elts = collections.deque(reversed(lhs.elts[index:])) + for node in lhs_elts: + if not isinstance(node, nodes.Starred): + elts.pop() + continue + # We're done + for elt in elts: + yield elt + break + +nodes.Starred.assigned_stmts = starred_assigned_stmts diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/raw_building.py b/pymode/libs/astroid/raw_building.py similarity index 92% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/raw_building.py rename to pymode/libs/astroid/raw_building.py index bb685a9e..99a026a7 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/raw_building.py +++ b/pymode/libs/astroid/raw_building.py @@ -25,10 +25,11 @@ from os.path import abspath from inspect import (getargspec, isdatadescriptor, isfunction, ismethod, ismethoddescriptor, isclass, isbuiltin, ismodule) +import six from astroid.node_classes import CONST_CLS from astroid.nodes import (Module, Class, Const, const_factory, From, - Function, EmptyNode, Name, Arguments) + Function, EmptyNode, Name, Arguments) from astroid.bases import BUILTINS, Generator from astroid.manager import AstroidManager MANAGER = AstroidManager() @@ -57,7 +58,10 @@ def attach_dummy_node(node, name, object=_marker): enode.object = object _attach_local_node(node, enode, name) -EmptyNode.has_underlying_object = lambda self: self.object is not _marker +def _has_underlying_object(self): + return hasattr(self, 'object') and self.object is not _marker + +EmptyNode.has_underlying_object = _has_underlying_object def attach_const_node(node, name, value): """create a Const node and register it in the locals of the given @@ -150,7 +154,7 @@ def object_build_function(node, member, localname): if varkw is not None: args.append(varkw) func = build_function(getattr(member, '__name__', None) or localname, args, - defaults, member.func_code.co_flags, member.__doc__) + defaults, 
six.get_function_code(member).co_flags, member.__doc__) node.add_local_node(func, localname) def object_build_datadescriptor(node, member, name): @@ -247,10 +251,11 @@ def object_build(self, node, obj): attach_dummy_node(node, name) continue if ismethod(member): - member = member.im_func + member = six.get_method_function(member) if isfunction(member): # verify this is not an imported function - filename = getattr(member.func_code, 'co_filename', None) + filename = getattr(six.get_function_code(member), + 'co_filename', None) if filename is None: assert isinstance(member, object) object_build_methoddescriptor(node, member, name) @@ -258,11 +263,9 @@ def object_build(self, node, obj): attach_dummy_node(node, name, member) else: object_build_function(node, member, name) - elif isbuiltin(member): + elif isbuiltin(member): if (not _io_discrepancy(member) and - self.imported_member(node, member, name)): - #if obj is object: - # print 'skippp', obj, name, member + self.imported_member(node, member, name)): continue object_build_methoddescriptor(node, member, name) elif isclass(member): @@ -299,7 +302,7 @@ def imported_member(self, node, member, name): modname = getattr(member, '__module__', None) except: # XXX use logging - print 'unexpected error while building astroid from living object' + print('unexpected error while building astroid from living object') import traceback traceback.print_exc() modname = None @@ -325,16 +328,18 @@ def imported_member(self, node, member, name): return False -### astroid boot strapping ################################################### ### +### astroid bootstrapping ###################################################### Astroid_BUILDER = InspectBuilder() _CONST_PROXY = {} -def astroid_boot_strapping(): +def _astroid_bootstrapping(astroid_builtin=None): """astroid boot strapping the builtins module""" # this boot strapping is necessary since we need the Const nodes to # inspect_build builtins, and then we can proxy Const - from 
logilab.common.compat import builtins - astroid_builtin = Astroid_BUILDER.inspect_build(builtins) + if astroid_builtin is None: + from logilab.common.compat import builtins + astroid_builtin = Astroid_BUILDER.inspect_build(builtins) + for cls, node_cls in CONST_CLS.items(): if cls is type(None): proxy = build_class('NoneType') @@ -346,7 +351,7 @@ def astroid_boot_strapping(): else: _CONST_PROXY[cls] = proxy -astroid_boot_strapping() +_astroid_bootstrapping() # TODO : find a nicer way to handle this situation; # However __proxied introduced an diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py b/pymode/libs/astroid/rebuilder.py similarity index 87% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py rename to pymode/libs/astroid/rebuilder.py index 40a614f8..013479a8 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/rebuilder.py +++ b/pymode/libs/astroid/rebuilder.py @@ -20,10 +20,10 @@ """ import sys -from warnings import warn -from _ast import (Expr as Discard, Str, Name, Attribute, +from _ast import ( + Expr as Discard, Str, # binary operators - Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor, + Add, BinOp, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor, LShift, RShift, # logical operators And, Or, @@ -34,6 +34,7 @@ ) from astroid import nodes as new +from astroid import astpeephole _BIN_OP_CLASSES = {Add: '+', @@ -47,15 +48,18 @@ Pow: '**', Sub: '-', LShift: '<<', - RShift: '>>'} + RShift: '>>', + } _BOOL_OP_CLASSES = {And: 'and', - Or: 'or'} + Or: 'or', + } _UNARY_OP_CLASSES = {UAdd: '+', USub: '-', Not: 'not', - Invert: '~'} + Invert: '~', + } _CMP_OP_CLASSES = {Eq: '==', Gt: '>', @@ -66,11 +70,13 @@ Lt: '<', LtE: '<=', NotEq: '!=', - NotIn: 'not in'} + NotIn: 'not in', + } CONST_NAME_TRANSFORMS = {'None': None, 'True': True, - 'False': False} + 'False': False, + } REDIRECT = {'arguments': 'Arguments', 'Attribute': 'Getattr', @@ -86,7 +92,7 @@ 'ImportFrom': 'From', 'keyword': 'Keyword', 
'Repr': 'Backquote', - } + } PY3K = sys.version_info >= (3, 0) PY34 = sys.version_info >= (3, 4) @@ -94,7 +100,6 @@ def _init_set_doc(node, newnode): newnode.doc = None try: if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str): - newnode.tolineno = node.body[0].lineno newnode.doc = node.body[0].value.s node.body = node.body[1:] @@ -103,10 +108,8 @@ def _init_set_doc(node, newnode): def _lineno_parent(oldnode, newnode, parent): newnode.parent = parent - if hasattr(oldnode, 'lineno'): - newnode.lineno = oldnode.lineno - if hasattr(oldnode, 'col_offset'): - newnode.col_offset = oldnode.col_offset + newnode.lineno = oldnode.lineno + newnode.col_offset = oldnode.col_offset def _set_infos(oldnode, newnode, parent): newnode.parent = parent @@ -114,20 +117,12 @@ def _set_infos(oldnode, newnode, parent): newnode.lineno = oldnode.lineno if hasattr(oldnode, 'col_offset'): newnode.col_offset = oldnode.col_offset - newnode.set_line_info(newnode.last_child()) # set_line_info accepts None - -def _infer_metaclass(node): - if isinstance(node, Name): - return node.id - elif isinstance(node, Attribute): - return node.attr def _create_yield_node(node, parent, rebuilder, factory): newnode = factory() _lineno_parent(node, newnode, parent) if node.value is not None: newnode.value = rebuilder.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode @@ -137,21 +132,21 @@ class TreeRebuilder(object): def __init__(self, manager): self._manager = manager self.asscontext = None - self._metaclass = [''] self._global_names = [] self._from_nodes = [] self._delayed_assattr = [] self._visit_meths = {} self._transform = manager.transform + self._peepholer = astpeephole.ASTPeepholeOptimizer() - def visit_module(self, node, modname, package): + def visit_module(self, node, modname, modpath, package): """visit a Module node by returning a fresh instance of it""" newnode = new.Module(modname, None) newnode.package = package - _lineno_parent(node, 
newnode, parent=None) + newnode.parent = None _init_set_doc(node, newnode) newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) + newnode.file = newnode.path = modpath return self._transform(newnode) def visit(self, node, parent): @@ -176,7 +171,7 @@ def _save_assignment(self, node, name=None): def visit_arguments(self, node, parent): """visit a Arguments node by returning a fresh instance of it""" newnode = new.Arguments() - _lineno_parent(node, newnode, parent) + newnode.parent = parent self.asscontext = "Ass" newnode.args = [self.visit(child, newnode) for child in node.args] self.asscontext = None @@ -186,10 +181,25 @@ def visit_arguments(self, node, parent): vararg, kwarg = node.vararg, node.kwarg # change added in 82732 (7c5c678e4164), vararg and kwarg # are instances of `_ast.arg`, not strings - if vararg and PY34: - vararg = vararg.arg - if kwarg and PY34: - kwarg = kwarg.arg + if vararg: + if PY34: + if vararg.annotation: + newnode.varargannotation = self.visit(vararg.annotation, + newnode) + vararg = vararg.arg + elif PY3K and node.varargannotation: + newnode.varargannotation = self.visit(node.varargannotation, + newnode) + if kwarg: + if PY34: + if kwarg.annotation: + newnode.kwargannotation = self.visit(kwarg.annotation, + newnode) + kwarg = kwarg.arg + elif PY3K: + if node.kwargannotation: + newnode.kwargannotation = self.visit(node.kwargannotation, + newnode) newnode.vararg = vararg newnode.kwarg = kwarg # save argument names in locals: @@ -197,7 +207,6 @@ def visit_arguments(self, node, parent): newnode.parent.set_local(vararg, newnode) if kwarg: newnode.parent.set_local(kwarg, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_assattr(self, node, parent): @@ -208,7 +217,6 @@ def visit_assattr(self, node, parent): newnode.expr = self.visit(node.expr, newnode) self.asscontext = assc self._delayed_assattr.append(newnode) - newnode.set_line_info(newnode.last_child()) 
return newnode def visit_assert(self, node, parent): @@ -218,7 +226,6 @@ def visit_assert(self, node, parent): newnode.test = self.visit(node.test, newnode) if node.msg is not None: newnode.fail = self.visit(node.msg, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_assign(self, node, parent): @@ -232,8 +239,8 @@ def visit_assign(self, node, parent): # set some function or metaclass infos XXX explain ? klass = newnode.parent.frame() if (isinstance(klass, new.Class) - and isinstance(newnode.value, new.CallFunc) - and isinstance(newnode.value.func, new.Name)): + and isinstance(newnode.value, new.CallFunc) + and isinstance(newnode.value.func, new.Name)): func_name = newnode.value.func.name for ass_node in newnode.targets: try: @@ -246,10 +253,6 @@ def visit_assign(self, node, parent): meth.extra_decorators.append(newnode.value) except (AttributeError, KeyError): continue - elif getattr(newnode.targets[0], 'name', None) == '__metaclass__': - # XXX check more... - self._metaclass[-1] = _infer_metaclass(node.value) - newnode.set_line_info(newnode.last_child()) return newnode def visit_assname(self, node, parent, node_name=None): @@ -269,7 +272,6 @@ def visit_augassign(self, node, parent): newnode.target = self.visit(node.target, newnode) self.asscontext = None newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_backquote(self, node, parent): @@ -277,17 +279,33 @@ def visit_backquote(self, node, parent): newnode = new.Backquote() _lineno_parent(node, newnode, parent) newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_binop(self, node, parent): """visit a BinOp node by returning a fresh instance of it""" + if isinstance(node.left, BinOp) and self._manager.optimize_ast: + # Optimize BinOp operations in order to remove + # redundant recursion. 
For instance, if the + # following code is parsed in order to obtain + # its ast, then the rebuilder will fail with an + # infinite recursion, the same will happen with the + # inference engine as well. There's no need to hold + # so many objects for the BinOp if they can be reduced + # to something else (also, the optimization + # might handle only Const binops, which isn't a big + # problem for the correctness of the program). + # + # ("a" + "b" + # one thousand more + "c") + newnode = self._peepholer.optimize_binop(node) + if newnode: + _lineno_parent(node, newnode, parent) + return newnode + newnode = new.BinOp() _lineno_parent(node, newnode, parent) newnode.left = self.visit(node.left, newnode) newnode.right = self.visit(node.right, newnode) newnode.op = _BIN_OP_CLASSES[node.op.__class__] - newnode.set_line_info(newnode.last_child()) return newnode def visit_boolop(self, node, parent): @@ -296,7 +314,6 @@ def visit_boolop(self, node, parent): _lineno_parent(node, newnode, parent) newnode.values = [self.visit(child, newnode) for child in node.values] newnode.op = _BOOL_OP_CLASSES[node.op.__class__] - newnode.set_line_info(newnode.last_child()) return newnode def visit_break(self, node, parent): @@ -315,13 +332,12 @@ def visit_callfunc(self, node, parent): newnode.starargs = self.visit(node.starargs, newnode) if node.kwargs is not None: newnode.kwargs = self.visit(node.kwargs, newnode) - newnode.args.extend(self.visit(child, newnode) for child in node.keywords) - newnode.set_line_info(newnode.last_child()) + for child in node.keywords: + newnode.args.append(self.visit(child, newnode)) return newnode def visit_class(self, node, parent): """visit a Class node to become astroid""" - self._metaclass.append(self._metaclass[-1]) newnode = new.Class(node.name, None) _lineno_parent(node, newnode, parent) _init_set_doc(node, newnode) @@ -329,15 +345,6 @@ def visit_class(self, node, parent): newnode.body = [self.visit(child, newnode) for child in node.body] if 
'decorator_list' in node._fields and node.decorator_list:# py >= 2.6 newnode.decorators = self.visit_decorators(node, newnode) - newnode.set_line_info(newnode.last_child()) - metaclass = self._metaclass.pop() - if PY3K: - newnode._newstyle = True - else: - if not newnode.bases: - # no base classes, detect new / style old style according to - # current scope - newnode._newstyle = metaclass in ('type', 'ABCMeta') newnode.parent.frame().set_local(newnode.name, newnode) return newnode @@ -359,20 +366,18 @@ def visit_compare(self, node, parent): _lineno_parent(node, newnode, parent) newnode.left = self.visit(node.left, newnode) newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode)) - for (op, expr) in zip(node.ops, node.comparators)] - newnode.set_line_info(newnode.last_child()) + for (op, expr) in zip(node.ops, node.comparators)] return newnode def visit_comprehension(self, node, parent): """visit a Comprehension node by returning a fresh instance of it""" newnode = new.Comprehension() - _lineno_parent(node, newnode, parent) + newnode.parent = parent self.asscontext = "Ass" newnode.target = self.visit(node.target, newnode) self.asscontext = None newnode.iter = self.visit(node.iter, newnode) newnode.ifs = [self.visit(child, newnode) for child in node.ifs] - newnode.set_line_info(newnode.last_child()) return newnode def visit_decorators(self, node, parent): @@ -384,9 +389,8 @@ def visit_decorators(self, node, parent): if 'decorators' in node._fields: # py < 2.6, i.e. 
2.5 decorators = node.decorators else: - decorators= node.decorator_list + decorators = node.decorator_list newnode.nodes = [self.visit(child, newnode) for child in decorators] - newnode.set_line_info(newnode.last_child()) return newnode def visit_delete(self, node, parent): @@ -396,7 +400,6 @@ def visit_delete(self, node, parent): self.asscontext = "Del" newnode.targets = [self.visit(child, newnode) for child in node.targets] self.asscontext = None - newnode.set_line_info(newnode.last_child()) return newnode def visit_dict(self, node, parent): @@ -404,8 +407,7 @@ def visit_dict(self, node, parent): newnode = new.Dict() _lineno_parent(node, newnode, parent) newnode.items = [(self.visit(key, newnode), self.visit(value, newnode)) - for key, value in zip(node.keys, node.values)] - newnode.set_line_info(newnode.last_child()) + for key, value in zip(node.keys, node.values)] return newnode def visit_dictcomp(self, node, parent): @@ -416,7 +418,6 @@ def visit_dictcomp(self, node, parent): newnode.value = self.visit(node.value, newnode) newnode.generators = [self.visit(child, newnode) for child in node.generators] - newnode.set_line_info(newnode.last_child()) return newnode def visit_discard(self, node, parent): @@ -424,7 +425,6 @@ def visit_discard(self, node, parent): newnode = new.Discard() _lineno_parent(node, newnode, parent) newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_ellipsis(self, node, parent): @@ -451,7 +451,6 @@ def visit_excepthandler(self, node, parent): newnode.name = self.visit(node.name, newnode) self.asscontext = None newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) return newnode def visit_exec(self, node, parent): @@ -463,15 +462,13 @@ def visit_exec(self, node, parent): newnode.globals = self.visit(node.globals, newnode) if node.locals is not None: newnode.locals = self.visit(node.locals, newnode) - 
newnode.set_line_info(newnode.last_child()) return newnode def visit_extslice(self, node, parent): """visit an ExtSlice node by returning a fresh instance of it""" newnode = new.ExtSlice() - _lineno_parent(node, newnode, parent) + newnode.parent = parent newnode.dims = [self.visit(dim, newnode) for dim in node.dims] - newnode.set_line_info(newnode.last_child()) return newnode def visit_for(self, node, parent): @@ -484,7 +481,6 @@ def visit_for(self, node, parent): newnode.iter = self.visit(node.iter, newnode) newnode.body = [self.visit(child, newnode) for child in node.body] newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) return newnode def visit_from(self, node, parent): @@ -511,7 +507,8 @@ def visit_function(self, node, parent): decorators = getattr(node, attr) if decorators: newnode.decorators = self.visit_decorators(node, newnode) - newnode.set_line_info(newnode.last_child()) + if PY3K and node.returns: + newnode.returns = self.visit(node.returns, newnode) self._global_names.pop() frame = newnode.parent.frame() if isinstance(frame, new.Class): @@ -535,7 +532,6 @@ def visit_genexpr(self, node, parent): _lineno_parent(node, newnode, parent) newnode.elt = self.visit(node.elt, newnode) newnode.generators = [self.visit(child, newnode) for child in node.generators] - newnode.set_line_info(newnode.last_child()) return newnode def visit_getattr(self, node, parent): @@ -555,7 +551,6 @@ def visit_getattr(self, node, parent): newnode.expr = self.visit(node.value, newnode) self.asscontext = asscontext newnode.attrname = node.attr - newnode.set_line_info(newnode.last_child()) return newnode def visit_global(self, node, parent): @@ -574,7 +569,6 @@ def visit_if(self, node, parent): newnode.test = self.visit(node.test, newnode) newnode.body = [self.visit(child, newnode) for child in node.body] newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) 
return newnode def visit_ifexp(self, node, parent): @@ -584,7 +578,6 @@ def visit_ifexp(self, node, parent): newnode.test = self.visit(node.test, newnode) newnode.body = self.visit(node.body, newnode) newnode.orelse = self.visit(node.orelse, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_import(self, node, parent): @@ -601,18 +594,16 @@ def visit_import(self, node, parent): def visit_index(self, node, parent): """visit a Index node by returning a fresh instance of it""" newnode = new.Index() - _lineno_parent(node, newnode, parent) + newnode.parent = parent newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_keyword(self, node, parent): """visit a Keyword node by returning a fresh instance of it""" newnode = new.Keyword() - _lineno_parent(node, newnode, parent) + newnode.parent = parent newnode.arg = node.arg newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_lambda(self, node, parent): @@ -621,7 +612,6 @@ def visit_lambda(self, node, parent): _lineno_parent(node, newnode, parent) newnode.args = self.visit(node.args, newnode) newnode.body = self.visit(node.body, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_list(self, node, parent): @@ -629,7 +619,6 @@ def visit_list(self, node, parent): newnode = new.List() _lineno_parent(node, newnode, parent) newnode.elts = [self.visit(child, newnode) for child in node.elts] - newnode.set_line_info(newnode.last_child()) return newnode def visit_listcomp(self, node, parent): @@ -639,7 +628,6 @@ def visit_listcomp(self, node, parent): newnode.elt = self.visit(node.elt, newnode) newnode.generators = [self.visit(child, newnode) for child in node.generators] - newnode.set_line_info(newnode.last_child()) return newnode def visit_name(self, node, parent): @@ -662,7 +650,6 @@ def visit_name(self, node, parent): # XXX REMOVE me : if 
self.asscontext in ('Del', 'Ass'): # 'Aug' ?? self._save_assignment(newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_bytes(self, node, parent): @@ -697,7 +684,6 @@ def visit_print(self, node, parent): if node.dest is not None: newnode.dest = self.visit(node.dest, newnode) newnode.values = [self.visit(child, newnode) for child in node.values] - newnode.set_line_info(newnode.last_child()) return newnode def visit_raise(self, node, parent): @@ -710,7 +696,6 @@ def visit_raise(self, node, parent): newnode.inst = self.visit(node.inst, newnode) if node.tback is not None: newnode.tback = self.visit(node.tback, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_return(self, node, parent): @@ -719,7 +704,6 @@ def visit_return(self, node, parent): _lineno_parent(node, newnode, parent) if node.value is not None: newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_set(self, node, parent): @@ -727,7 +711,6 @@ def visit_set(self, node, parent): newnode = new.Set() _lineno_parent(node, newnode, parent) newnode.elts = [self.visit(child, newnode) for child in node.elts] - newnode.set_line_info(newnode.last_child()) return newnode def visit_setcomp(self, node, parent): @@ -737,20 +720,18 @@ def visit_setcomp(self, node, parent): newnode.elt = self.visit(node.elt, newnode) newnode.generators = [self.visit(child, newnode) for child in node.generators] - newnode.set_line_info(newnode.last_child()) return newnode def visit_slice(self, node, parent): """visit a Slice node by returning a fresh instance of it""" newnode = new.Slice() - _lineno_parent(node, newnode, parent) + newnode.parent = parent if node.lower is not None: newnode.lower = self.visit(node.lower, newnode) if node.upper is not None: newnode.upper = self.visit(node.upper, newnode) if node.step is not None: newnode.step = self.visit(node.step, newnode) - newnode.set_line_info(newnode.last_child()) 
return newnode def visit_subscript(self, node, parent): @@ -761,7 +742,6 @@ def visit_subscript(self, node, parent): newnode.value = self.visit(node.value, newnode) newnode.slice = self.visit(node.slice, newnode) self.asscontext = subcontext - newnode.set_line_info(newnode.last_child()) return newnode def visit_tryexcept(self, node, parent): @@ -771,7 +751,6 @@ def visit_tryexcept(self, node, parent): newnode.body = [self.visit(child, newnode) for child in node.body] newnode.handlers = [self.visit(child, newnode) for child in node.handlers] newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) return newnode def visit_tryfinally(self, node, parent): @@ -780,7 +759,6 @@ def visit_tryfinally(self, node, parent): _lineno_parent(node, newnode, parent) newnode.body = [self.visit(child, newnode) for child in node.body] newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody] - newnode.set_line_info(newnode.last_child()) return newnode def visit_tuple(self, node, parent): @@ -788,7 +766,6 @@ def visit_tuple(self, node, parent): newnode = new.Tuple() _lineno_parent(node, newnode, parent) newnode.elts = [self.visit(child, newnode) for child in node.elts] - newnode.set_line_info(newnode.last_child()) return newnode def visit_unaryop(self, node, parent): @@ -797,7 +774,6 @@ def visit_unaryop(self, node, parent): _lineno_parent(node, newnode, parent) newnode.operand = self.visit(node.operand, newnode) newnode.op = _UNARY_OP_CLASSES[node.op.__class__] - newnode.set_line_info(newnode.last_child()) return newnode def visit_while(self, node, parent): @@ -807,7 +783,6 @@ def visit_while(self, node, parent): newnode.test = self.visit(node.test, newnode) newnode.body = [self.visit(child, newnode) for child in node.body] newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) return newnode def visit_with(self, node, parent): @@ -822,7 +797,6 @@ def 
visit_with(self, node, parent): self.asscontext = None newnode.items = [(expr, vars)] newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) return newnode def visit_yield(self, node, parent): @@ -850,6 +824,9 @@ def visit_arguments(self, node, parent): newnode.kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs] self.asscontext = None newnode.kw_defaults = [self.visit(child, newnode) if child else None for child in node.kw_defaults] + newnode.annotations = [ + self.visit(arg.annotation, newnode) if arg.annotation else None + for arg in node.args] return newnode def visit_excepthandler(self, node, parent): @@ -861,7 +838,6 @@ def visit_excepthandler(self, node, parent): if node.name is not None: newnode.name = self.visit_assname(node, newnode, node.name) newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) return newnode def visit_nonlocal(self, node, parent): @@ -879,7 +855,6 @@ def visit_raise(self, node, parent): newnode.exc = self.visit(node.exc, newnode) if node.cause is not None: newnode.cause = self.visit(node.cause, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_starred(self, node, parent): @@ -887,7 +862,6 @@ def visit_starred(self, node, parent): newnode = new.Starred() _lineno_parent(node, newnode, parent) newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) return newnode def visit_try(self, node, parent): @@ -902,7 +876,6 @@ def visit_try(self, node, parent): excnode.body = [self.visit(child, excnode) for child in node.body] excnode.handlers = [self.visit(child, excnode) for child in node.handlers] excnode.orelse = [self.visit(child, excnode) for child in node.orelse] - excnode.set_line_info(excnode.last_child()) newnode.body = [excnode] else: newnode.body = [self.visit(child, newnode) for child in node.body] @@ -912,7 +885,6 @@ def 
visit_try(self, node, parent): newnode.body = [self.visit(child, newnode) for child in node.body] newnode.handlers = [self.visit(child, newnode) for child in node.handlers] newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) return newnode def visit_with(self, node, parent): @@ -934,7 +906,6 @@ def visit_child(child): newnode.items = [visit_child(child) for child in node.items] newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) return newnode def visit_yieldfrom(self, node, parent): @@ -942,6 +913,7 @@ def visit_yieldfrom(self, node, parent): def visit_class(self, node, parent): newnode = super(TreeRebuilder3k, self).visit_class(node, parent) + newnode._newstyle = True for keyword in node.keywords: if keyword.arg == 'metaclass': newnode._metaclass = self.visit(keyword, newnode).value diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py b/pymode/libs/astroid/scoped_nodes.py similarity index 67% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py rename to pymode/libs/astroid/scoped_nodes.py index 889baa0e..ac90f878 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/scoped_nodes.py +++ b/pymode/libs/astroid/scoped_nodes.py @@ -24,27 +24,72 @@ __doctype__ = "restructuredtext en" import sys +import warnings from itertools import chain try: from io import BytesIO except ImportError: from cStringIO import StringIO as BytesIO +import six from logilab.common.compat import builtins from logilab.common.decorators import cached, cachedproperty from astroid.exceptions import NotFoundError, \ - AstroidBuildingException, InferenceError + AstroidBuildingException, InferenceError, ResolveError from astroid.node_classes import Const, DelName, DelAttr, \ Dict, From, List, Pass, Raise, Return, Tuple, Yield, YieldFrom, \ - LookupMixIn, const_factory as cf, unpack_infer, Name -from astroid.bases import NodeNG, 
InferenceContext, Instance,\ - YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \ + LookupMixIn, const_factory as cf, unpack_infer, CallFunc +from astroid.bases import NodeNG, InferenceContext, Instance, copy_context, \ + YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, \ BUILTINS from astroid.mixins import FilterStmtsMixin from astroid.bases import Statement from astroid.manager import AstroidManager +ITER_METHODS = ('__iter__', '__getitem__') +PY3K = sys.version_info >= (3, 0) + +def _c3_merge(sequences): + """Merges MROs in *sequences* to a single MRO using the C3 algorithm. + + Adapted from http://www.python.org/download/releases/2.3/mro/. + + """ + result = [] + while True: + sequences = [s for s in sequences if s] # purge empty sequences + if not sequences: + return result + for s1 in sequences: # find merge candidates among seq heads + candidate = s1[0] + for s2 in sequences: + if candidate in s2[1:]: + candidate = None + break # reject the current head, it appears later + else: + break + if not candidate: + # Show all the remaining bases, which were considered as + # candidates for the next mro sequence. 
+ bases = ["({})".format(", ".join(base.name + for base in subsequence)) + for subsequence in sequences] + raise ResolveError("Cannot create a consistent method resolution " + "order for bases %s" % ", ".join(bases)) + result.append(candidate) + # remove the chosen candidate + for seq in sequences: + if seq[0] == candidate: + del seq[0] + + +def _verify_duplicates_mro(sequences): + for sequence in sequences: + names = [node.qname() for node in sequence] + if len(names) != len(set(names)): + raise ResolveError('Duplicates found in the mro.') + def remove_nodes(func, cls): def wrapper(*args, **kwargs): @@ -188,7 +233,7 @@ def keys(self): """method from the `dict` interface returning a tuple containing locally defined names """ - return self.locals.keys() + return list(self.locals.keys()) def values(self): """method from the `dict` interface returning a tuple containing @@ -201,7 +246,7 @@ def items(self): containing each locally defined name with its associated node, which is an instance of `Function` or `Class` """ - return zip(self.keys(), self.values()) + return list(zip(self.keys(), self.values())) def __contains__(self, name): @@ -253,14 +298,37 @@ def __init__(self, name, doc, pure_python=True): self.body = [] self.future_imports = set() - @property - def file_stream(self): + def _get_stream(self): if self.file_bytes is not None: return BytesIO(self.file_bytes) if self.file is not None: - return open(self.file, 'rb') + stream = open(self.file, 'rb') + return stream return None + @property + def file_stream(self): + warnings.warn("file_stream property is deprecated and " + "it is slated for removal in astroid 1.6." 
+ "Use the new method 'stream' instead.", + PendingDeprecationWarning, + stacklevel=2) + return self._get_stream() + + def stream(self): + """Get a stream to the underlying file or bytes.""" + return self._get_stream() + + def close(self): + """Close the underlying file streams.""" + warnings.warn("close method is deprecated and it is " + "slated for removal in astroid 1.6, along " + "with 'file_stream' property. " + "Its behaviour is replaced by managing each " + "file stream returned by the 'stream' method.", + PendingDeprecationWarning, + stacklevel=2) + def block_range(self, lineno): """return block line numbers. @@ -336,13 +404,17 @@ def next_sibling(self): return if sys.version_info < (2, 8): - def absolute_import_activated(self): + @cachedproperty + def _absolute_import_activated(self): for stmt in self.locals.get('absolute_import', ()): if isinstance(stmt, From) and stmt.modname == '__future__': return True return False else: - absolute_import_activated = lambda self: True + _absolute_import_activated = True + + def absolute_import_activated(self): + return self._absolute_import_activated def import_module(self, modname, relative_only=False, level=None): """import the given module considering self as context""" @@ -405,24 +477,43 @@ def wildcard_import_names(self): # # We separate the different steps of lookup in try/excepts # to avoid catching too many Exceptions - # However, we can not analyse dynamically constructed __all__ + default = [name for name in self.keys() if not name.startswith('_')] try: all = self['__all__'] except KeyError: - return [name for name in self.keys() if not name.startswith('_')] + return default try: - explicit = all.assigned_stmts().next() + explicit = next(all.assigned_stmts()) except InferenceError: - return [name for name in self.keys() if not name.startswith('_')] + return default except AttributeError: # not an assignment node # XXX infer? 
- return [name for name in self.keys() if not name.startswith('_')] + return default + + # Try our best to detect the exported name. + infered = [] try: - # should be a Tuple/List of constant string / 1 string not allowed - return [const.value for const in explicit.elts] - except AttributeError: - return [name for name in self.keys() if not name.startswith('_')] + explicit = next(explicit.infer()) + except InferenceError: + return default + if not isinstance(explicit, (Tuple, List)): + return default + + str_const = lambda node: (isinstance(node, Const) and + isinstance(node.value, six.string_types)) + for node in explicit.elts: + if str_const(node): + infered.append(node.value) + else: + try: + infered_node = next(node.infer()) + except InferenceError: + continue + if str_const(infered_node): + infered.append(infered_node.value) + return infered + class ComprehensionScope(LocalsDictNodeNG): @@ -476,7 +567,31 @@ class ListComp(_ListComp): """class representing a ListComp node""" # Function ################################################################### - + +def _infer_decorator_callchain(node): + """Detect decorator call chaining and see if the end result is a + static or a classmethod. + """ + if not isinstance(node, Function): + return + if not node.parent: + return + try: + # TODO: We don't handle multiple inference results right now, + # because there's no flow to reason when the return + # is what we are looking for, a static or a class method. + result = next(node.infer_call_result(node.parent)) + except (StopIteration, InferenceError): + return + if isinstance(result, Instance): + result = result._proxied + if isinstance(result, Class): + if result.is_subtype_of('%s.classmethod' % BUILTINS): + return 'classmethod' + if result.is_subtype_of('%s.staticmethod' % BUILTINS): + return 'staticmethod' + + def _function_type(self): """ Function type, possible values are: @@ -487,20 +602,35 @@ def _function_type(self): # so do it here. 
if self.decorators: for node in self.decorators.nodes: - if not isinstance(node, Name): - continue + if isinstance(node, CallFunc): + # Handle the following case: + # @some_decorator(arg1, arg2) + # def func(...) + # + try: + current = next(node.func.infer()) + except InferenceError: + continue + _type = _infer_decorator_callchain(current) + if _type is not None: + return _type + try: for infered in node.infer(): + # Check to see if this returns a static or a class method. + _type = _infer_decorator_callchain(infered) + if _type is not None: + return _type + if not isinstance(infered, Class): continue for ancestor in infered.ancestors(): - if isinstance(ancestor, Class): - if (ancestor.name == 'classmethod' and - ancestor.root().name == BUILTINS): - return 'classmethod' - elif (ancestor.name == 'staticmethod' and - ancestor.root().name == BUILTINS): - return 'staticmethod' + if not isinstance(ancestor, Class): + continue + if ancestor.is_subtype_of('%s.classmethod' % BUILTINS): + return 'classmethod' + elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS): + return 'staticmethod' except InferenceError: pass return self._type @@ -560,7 +690,11 @@ def scope_lookup(self, node, name, offset=0): class Function(Statement, Lambda): - _astroid_fields = ('decorators', 'args', 'body') + if PY3K: + _astroid_fields = ('decorators', 'args', 'body', 'returns') + returns = None + else: + _astroid_fields = ('decorators', 'args', 'body') special_attributes = set(('__name__', '__doc__', '__dict__')) is_function = True @@ -574,22 +708,25 @@ def __init__(self, name, doc): self.locals = {} self.args = [] self.body = [] - self.decorators = None self.name = name self.doc = doc self.extra_decorators = [] self.instance_attrs = {} - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - # lineno is the line number of the first decorator, we want the def statement lineno + @cachedproperty + def fromlineno(self): + # lineno is the line number of the first decorator, we want 
the def + # statement lineno + lineno = self.lineno if self.decorators is not None: - self.fromlineno += sum(node.tolineno - node.lineno + 1 + lineno += sum(node.tolineno - node.lineno + 1 for node in self.decorators.nodes) - if self.args.fromlineno < self.fromlineno: - self.args.fromlineno = self.fromlineno - self.tolineno = lastchild.tolineno - self.blockstart_tolineno = self.args.tolineno + + return lineno + + @cachedproperty + def blockstart_tolineno(self): + return self.args.tolineno def block_range(self, lineno): """return block line numbers. @@ -633,7 +770,7 @@ def is_bound(self): def is_abstract(self, pass_is_abstract=True): """Returns True if the method is abstract. - + A method is considered abstract if - the only statement is 'raise NotImplementedError', or - the only statement is 'pass' and pass_is_abstract is True, or @@ -642,7 +779,7 @@ def is_abstract(self, pass_is_abstract=True): if self.decorators: for node in self.decorators.nodes: try: - infered = node.infer().next() + infered = next(node.infer()) except InferenceError: continue if infered and infered.qname() in ('abc.abstractproperty', @@ -663,17 +800,33 @@ def is_abstract(self, pass_is_abstract=True): def is_generator(self): """return true if this is a generator function""" # XXX should be flagged, not computed - try: - return self.nodes_of_class((Yield, YieldFrom), - skip_klass=(Function, Lambda)).next() - except StopIteration: - return False + return next(self.nodes_of_class((Yield, YieldFrom), + skip_klass=(Function, Lambda)), False) def infer_call_result(self, caller, context=None): """infer what a function is returning when called""" if self.is_generator(): yield Generator() return + # This is really a gigantic hack to work around metaclass generators + # that return transient class-generating functions. Pylint's AST structure + # cannot handle a base class object that is only used for calling __new__, + # but does not contribute to the inheritance structure itself. 
We inject + # a fake class into the hierarchy here for several well-known metaclass + # generators, and filter it out later. + if (self.name == 'with_metaclass' and + len(self.args.args) == 1 and + self.args.vararg is not None): + metaclass = next(caller.args[0].infer(context)) + if isinstance(metaclass, Class): + c = Class('temporary_class', None) + c.hide = True + c.parent = self + bases = [next(b.infer(context)) for b in caller.args[1:]] + c.bases = [base for base in bases if base != YES] + c._metaclass = metaclass + yield c + return returns = self.nodes_of_class(Return, skip_klass=Function) for returnnode in returns: if returnnode.value is None: @@ -701,15 +854,21 @@ def _rec_get_names(args, names=None): # Class ###################################################################### -def _is_metaclass(klass): +def _is_metaclass(klass, seen=None): """ Return if the given class can be used as a metaclass. """ if klass.name == 'type': return True + if seen is None: + seen = set() for base in klass.bases: try: for baseobj in base.infer(): + if baseobj in seen: + continue + else: + seen.add(baseobj) if isinstance(baseobj, Instance): # not abstract return False @@ -721,7 +880,7 @@ def _is_metaclass(klass): continue if baseobj._type == 'metaclass': return True - if _is_metaclass(baseobj): + if _is_metaclass(baseobj, seen): return True except InferenceError: continue @@ -749,16 +908,15 @@ def _class_type(klass, ancestors=None): klass._type = 'class' return 'class' ancestors.add(klass) - # print >> sys.stderr, '_class_type', repr(klass) for base in klass.ancestors(recurs=False): name = _class_type(base, ancestors) if name != 'class': - if name == 'metaclass' and not _is_metaclass(klass): - # don't propagate it if the current class - # can't be a metaclass - continue - klass._type = base.type - break + if name == 'metaclass' and not _is_metaclass(klass): + # don't propagate it if the current class + # can't be a metaclass + continue + klass._type = base.type + break if 
klass._type is None: klass._type = 'class' return klass._type @@ -784,6 +942,8 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin): blockstart_tolineno = None _type = None + _metaclass_hack = False + hide = False type = property(_class_type, doc="class'type, possible values are 'class' | " "'metaclass' | 'interface' | 'exception'") @@ -805,6 +965,11 @@ def _newstyle_impl(self, context=None): if base._newstyle_impl(context): self._newstyle = True break + klass = self._explicit_metaclass() + # could be any callable, we'd need to infer the result of klass(name, + # bases, dict). punt if it's not a class node. + if klass is not None and isinstance(klass, Class): + self._newstyle = klass._newstyle_impl(context) if self._newstyle is None: self._newstyle = False return self._newstyle @@ -814,12 +979,12 @@ def _newstyle_impl(self, context=None): doc="boolean indicating if it's a new style class" "or not") - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - self.blockstart_tolineno = self.bases and self.bases[-1].tolineno or self.fromlineno - if lastchild is not None: - self.tolineno = lastchild.tolineno - # else this is a class with only a docstring, then tolineno is (should be) already ok + @cachedproperty + def blockstart_tolineno(self): + if self.bases: + return self.bases[-1].tolineno + else: + return self.fromlineno def block_range(self, lineno): """return block line numbers. 
@@ -839,12 +1004,54 @@ def display_type(self): def callable(self): return True + def is_subtype_of(self, type_name, context=None): + if self.qname() == type_name: + return True + for anc in self.ancestors(context=context): + if anc.qname() == type_name: + return True + def infer_call_result(self, caller, context=None): """infer what a class is returning when called""" - yield Instance(self) + if self.is_subtype_of('%s.type' % (BUILTINS,), context) and len(caller.args) == 3: + name_node = next(caller.args[0].infer(context)) + if (isinstance(name_node, Const) and + isinstance(name_node.value, six.string_types)): + name = name_node.value + else: + yield YES + return + result = Class(name, None) + bases = next(caller.args[1].infer(context)) + if isinstance(bases, (Tuple, List)): + result.bases = bases.itered() + else: + # There is currently no AST node that can represent an 'unknown' + # node (YES is not an AST node), therefore we simply return YES here + # although we know at least the name of the class. + yield YES + return + result.parent = caller.parent + yield result + else: + yield Instance(self) def scope_lookup(self, node, name, offset=0): - if node in self.bases: + if any(node == base or base.parent_of(node) + for base in self.bases): + # Handle the case where we have either a name + # in the bases of a class, which exists before + # the actual definition or the case where we have + # a Getattr node, with that name. + # + # name = ... + # class A(name): + # def name(self): ... + # + # import name + # class A(name.Name): + # def name(self): ... 
+ frame = self.parent.frame() # line offset to avoid that class A(A) resolve the ancestor to # the defined class @@ -868,11 +1075,15 @@ def ancestors(self, recurs=True, context=None): ancestors only """ # FIXME: should be possible to choose the resolution order - # XXX inference make infinite loops possible here (see BaseTransformer - # manipulation in the builder module for instance) + # FIXME: inference make infinite loops possible here yielded = set([self]) if context is None: context = InferenceContext() + if sys.version_info[0] >= 3: + if not self.bases and self.qname() != 'builtins.object': + yield builtin_lookup("object")[1][0] + return + for stmt in self.bases: with context.restore_path(): try: @@ -883,12 +1094,17 @@ def ancestors(self, recurs=True, context=None): else: # duh ? continue - if baseobj in yielded: - continue # cf xxx above - yielded.add(baseobj) - yield baseobj + if not baseobj.hide: + if baseobj in yielded: + continue # cf xxx above + yielded.add(baseobj) + yield baseobj if recurs: - for grandpa in baseobj.ancestors(True, context): + for grandpa in baseobj.ancestors(recurs=True, + context=context): + if grandpa is self: + # This class is the ancestor of itself. + break if grandpa in yielded: continue # cf xxx above yielded.add(grandpa) @@ -941,7 +1157,9 @@ def instance_attr(self, name, context=None): if no attribute with this name has been find in this class or its parent classes """ - values = self.instance_attrs.get(name, []) + # Return a copy, so we don't modify self.instance_attrs, + # which could lead to infinite loop. 
+ values = list(self.instance_attrs.get(name, [])) # get all values from parents for class_node in self.instance_attr_ancestors(name, context): values += class_node.instance_attrs[name] @@ -1079,23 +1297,45 @@ def _explicit_metaclass(self): An explicit defined metaclass is defined either by passing the ``metaclass`` keyword argument - in the class definition line (Python 3) or by - having a ``__metaclass__`` class attribute. + in the class definition line (Python 3) or (Python 2) by + having a ``__metaclass__`` class attribute, or if there are + no explicit bases but there is a global ``__metaclass__`` variable. """ + for base in self.bases: + try: + for baseobj in base.infer(): + if isinstance(baseobj, Class) and baseobj.hide: + self._metaclass = baseobj._metaclass + self._metaclass_hack = True + break + except InferenceError: + pass + if self._metaclass: # Expects this from Py3k TreeRebuilder try: return next(node for node in self._metaclass.infer() if node is not YES) except (InferenceError, StopIteration): - return + return None + if sys.version_info >= (3, ): + return None + + if '__metaclass__' in self.locals: + assignment = self.locals['__metaclass__'][-1] + elif self.bases: + return None + elif '__metaclass__' in self.root().locals: + assignments = [ass for ass in self.root().locals['__metaclass__'] + if ass.lineno < self.lineno] + if not assignments: + return None + assignment = assignments[-1] + else: + return None try: - meta = self.getattr('__metaclass__')[0] - except NotFoundError: - return - try: - infered = meta.infer().next() + infered = next(assignment.infer()) except InferenceError: return if infered is YES: # don't expose this @@ -1116,3 +1356,129 @@ def metaclass(self): if klass is not None: break return klass + + def has_metaclass_hack(self): + return self._metaclass_hack + + def _islots(self): + """ Return an iterator with the inferred slots. 
""" + if '__slots__' not in self.locals: + return + for slots in self.igetattr('__slots__'): + # check if __slots__ is a valid type + for meth in ITER_METHODS: + try: + slots.getattr(meth) + break + except NotFoundError: + continue + else: + continue + + if isinstance(slots, Const): + # a string. Ignore the following checks, + # but yield the node, only if it has a value + if slots.value: + yield slots + continue + if not hasattr(slots, 'itered'): + # we can't obtain the values, maybe a .deque? + continue + + if isinstance(slots, Dict): + values = [item[0] for item in slots.items] + else: + values = slots.itered() + if values is YES: + continue + + for elt in values: + try: + for infered in elt.infer(): + if infered is YES: + continue + if (not isinstance(infered, Const) or + not isinstance(infered.value, + six.string_types)): + continue + if not infered.value: + continue + yield infered + except InferenceError: + continue + + # Cached, because inferring them all the time is expensive + @cached + def slots(self): + """Get all the slots for this node. + + If the class doesn't define any slot, through `__slots__` + variable, then this function will return a None. + Also, it will return None in the case the slots weren't inferred. + Otherwise, it will return a list of slot names. + """ + if not self.newstyle: + raise NotImplementedError( + "The concept of slots is undefined for old-style classes.") + + slots = self._islots() + try: + first = next(slots) + except StopIteration: + # The class doesn't have a __slots__ definition. + return None + return [first] + list(slots) + + def _inferred_bases(self, recurs=True, context=None): + # TODO(cpopa): really similar with .ancestors, + # but the difference is when one base is inferred, + # only the first object is wanted. 
That's because + # we aren't interested in superclasses, as in the following + # example: + # + # class SomeSuperClass(object): pass + # class SomeClass(SomeSuperClass): pass + # class Test(SomeClass): pass + # + # Inferring SomeClass from the Test's bases will give + # us both SomeClass and SomeSuperClass, but we are interested + # only in SomeClass. + + if context is None: + context = InferenceContext() + if sys.version_info[0] >= 3: + if not self.bases and self.qname() != 'builtins.object': + yield builtin_lookup("object")[1][0] + return + + for stmt in self.bases: + try: + baseobj = next(stmt.infer(context=context)) + except InferenceError: + # XXX log error ? + continue + if isinstance(baseobj, Instance): + baseobj = baseobj._proxied + if not isinstance(baseobj, Class): + continue + if not baseobj.hide: + yield baseobj + + def mro(self, context=None): + """Get the method resolution order, using C3 linearization. + + It returns the list of ancestors sorted by the mro. + This will raise `NotImplementedError` for old-style classes, since + they don't have the concept of MRO. + """ + if not self.newstyle: + raise NotImplementedError( + "Could not obtain mro for old-style classes.") + + bases = list(self._inferred_bases(context=context)) + unmerged_mro = ([[self]] + + [base.mro() for base in bases if base is not self] + + [bases]) + + _verify_duplicates_mro(unmerged_mro) + return _c3_merge(unmerged_mro) diff --git a/pymode/libs/astroid/test_utils.py b/pymode/libs/astroid/test_utils.py new file mode 100644 index 00000000..19bd7b96 --- /dev/null +++ b/pymode/libs/astroid/test_utils.py @@ -0,0 +1,218 @@ +"""Utility functions for test code that uses astroid ASTs as input.""" +import functools +import sys +import textwrap + +from astroid import nodes +from astroid import builder +# The name of the transient function that is used to +# wrap expressions to be extracted when calling +# extract_node. 
+_TRANSIENT_FUNCTION = '__' + +# The comment used to select a statement to be extracted +# when calling extract_node. +_STATEMENT_SELECTOR = '#@' + + +def _extract_expressions(node): + """Find expressions in a call to _TRANSIENT_FUNCTION and extract them. + + The function walks the AST recursively to search for expressions that + are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an + expression, it completely removes the function call node from the tree, + replacing it by the wrapped expression inside the parent. + + :param node: An astroid node. + :type node: astroid.bases.NodeNG + :yields: The sequence of wrapped expressions on the modified tree + expression can be found. + """ + if (isinstance(node, nodes.CallFunc) + and isinstance(node.func, nodes.Name) + and node.func.name == _TRANSIENT_FUNCTION): + real_expr = node.args[0] + real_expr.parent = node.parent + # Search for node in all _astng_fields (the fields checked when + # get_children is called) of its parent. Some of those fields may + # be lists or tuples, in which case the elements need to be checked. + # When we find it, replace it by real_expr, so that the AST looks + # like no call to _TRANSIENT_FUNCTION ever took place. + for name in node.parent._astroid_fields: + child = getattr(node.parent, name) + if isinstance(child, (list, tuple)): + for idx, compound_child in enumerate(child): + if compound_child is node: + child[idx] = real_expr + elif child is node: + setattr(node.parent, name, real_expr) + yield real_expr + else: + for child in node.get_children(): + for result in _extract_expressions(child): + yield result + + +def _find_statement_by_line(node, line): + """Extracts the statement on a specific line from an AST. + + If the line number of node matches line, it will be returned; + otherwise its children are iterated and the function is called + recursively. + + :param node: An astroid node. 
+ :type node: astroid.bases.NodeNG + :param line: The line number of the statement to extract. + :type line: int + :returns: The statement on the line, or None if no statement for the line + can be found. + :rtype: astroid.bases.NodeNG or None + """ + if isinstance(node, (nodes.Class, nodes.Function)): + # This is an inaccuracy in the AST: the nodes that can be + # decorated do not carry explicit information on which line + # the actual definition (class/def), but .fromline seems to + # be close enough. + node_line = node.fromlineno + else: + node_line = node.lineno + + if node_line == line: + return node + + for child in node.get_children(): + result = _find_statement_by_line(child, line) + if result: + return result + + return None + +def extract_node(code, module_name=''): + """Parses some Python code as a module and extracts a designated AST node. + + Statements: + To extract one or more statement nodes, append #@ to the end of the line + + Examples: + >>> def x(): + >>> def y(): + >>> return 1 #@ + + The return statement will be extracted. + + >>> class X(object): + >>> def meth(self): #@ + >>> pass + + The funcion object 'meth' will be extracted. + + Expressions: + To extract arbitrary expressions, surround them with the fake + function call __(...). After parsing, the surrounded expression + will be returned and the whole AST (accessible via the returned + node's parent attribute) will look like the function call was + never there in the first place. + + Examples: + >>> a = __(1) + + The const node will be extracted. + + >>> def x(d=__(foo.bar)): pass + + The node containing the default argument will be extracted. + + >>> def foo(a, b): + >>> return 0 < __(len(a)) < b + + The node containing the function call 'len' will be extracted. + + If no statements or expressions are selected, the last toplevel + statement will be returned. + + If the selected statement is a discard statement, (i.e. 
an expression + turned into a statement), the wrapped expression is returned instead. + + For convenience, singleton lists are unpacked. + + :param str code: A piece of Python code that is parsed as + a module. Will be passed through textwrap.dedent first. + :param str module_name: The name of the module. + :returns: The designated node from the parse tree, or a list of nodes. + :rtype: astroid.bases.NodeNG, or a list of nodes. + """ + def _extract(node): + if isinstance(node, nodes.Discard): + return node.value + else: + return node + + requested_lines = [] + for idx, line in enumerate(code.splitlines()): + if line.strip().endswith(_STATEMENT_SELECTOR): + requested_lines.append(idx + 1) + + tree = build_module(code, module_name=module_name) + extracted = [] + if requested_lines: + for line in requested_lines: + extracted.append(_find_statement_by_line(tree, line)) + + # Modifies the tree. + extracted.extend(_extract_expressions(tree)) + + if not extracted: + extracted.append(tree.body[-1]) + + extracted = [_extract(node) for node in extracted] + if len(extracted) == 1: + return extracted[0] + else: + return extracted + + +def build_module(code, module_name='', path=None): + """Parses a string module with a builder. + :param code: The code for the module. + :type code: str + :param module_name: The name for the module + :type module_name: str + :param path: The path for the module + :type module_name: str + :returns: The module AST. + :rtype: astroid.bases.NodeNG + """ + code = textwrap.dedent(code) + return builder.AstroidBuilder(None).string_build(code, modname=module_name, path=path) + + +def require_version(minver=None, maxver=None): + """ Compare version of python interpreter to the given one. Skip the test + if older. + """ + def parse(string, default=None): + string = string or default + try: + return tuple(int(v) for v in string.split('.')) + except ValueError: + raise ValueError('%s is not a correct version : should be X.Y[.Z].' 
% version) + + def check_require_version(f): + current = sys.version_info[:3] + if parse(minver, "0") < current <= parse(maxver, "4"): + return f + else: + str_version = '.'.join(str(v) for v in sys.version_info) + @functools.wraps(f) + def new_f(self, *args, **kwargs): + if minver is not None: + self.skipTest('Needs Python > %s. Current version is %s.' % (minver, str_version)) + elif maxver is not None: + self.skipTest('Needs Python <= %s. Current version is %s.' % (maxver, str_version)) + return new_f + + + return check_require_version + +def get_name_node(start_from, name, index=0): + return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index] diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/utils.py b/pymode/libs/astroid/utils.py similarity index 89% rename from pymode/libs/pylama/lint/pylama_pylint/astroid/utils.py rename to pymode/libs/astroid/utils.py index 1cd0e778..ae72a92c 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/utils.py +++ b/pymode/libs/astroid/utils.py @@ -18,6 +18,7 @@ """this module contains some utilities to navigate in the tree or to extract information from it """ +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -25,7 +26,7 @@ from astroid.builder import parse -class ASTWalker: +class ASTWalker(object): """a walker visiting a tree in preorder, calling on the handler: * visit_ on entering a node, where class name is the class of @@ -98,7 +99,7 @@ def visit(self, node): if methods[0] is not None: methods[0](node) if 'locals' in node.__dict__: # skip Instance and other proxy - for name, local_node in node.items(): + for local_node in node.values(): self.visit(local_node) if methods[1] is not None: return methods[1](node) @@ -109,20 +110,22 @@ def _check_children(node): for child in node.get_children(): ok = False if child is None: - print "Hm, child of %s is None" % node + print("Hm, child of %s is None" % node) continue if not hasattr(child, 'parent'): - print " 
ERROR: %s has child %s %x with no parent" % (node, child, id(child)) + print(" ERROR: %s has child %s %x with no parent" % ( + node, child, id(child))) elif not child.parent: - print " ERROR: %s has child %s %x with parent %r" % (node, child, id(child), child.parent) + print(" ERROR: %s has child %s %x with parent %r" % ( + node, child, id(child), child.parent)) elif child.parent is not node: - print " ERROR: %s %x has child %s %x with wrong parent %s" % (node, - id(node), child, id(child), child.parent) + print(" ERROR: %s %x has child %s %x with wrong parent %s" % ( + node, id(node), child, id(child), child.parent)) else: ok = True if not ok: - print "lines;", node.lineno, child.lineno - print "of module", node.root(), node.root().name + print("lines;", node.lineno, child.lineno) + print("of module", node.root(), node.root().name) raise AstroidBuildingException _check_children(child) @@ -145,7 +148,7 @@ class TreeTester(object): Module() body = [ Print() - dest = + dest = values = [ ] ] @@ -180,8 +183,8 @@ def _native_repr_tree(self, node, indent, _done=None): if _done is None: _done = set() if node in _done: - self._string += '\nloop in tree: %r (%s)' % (node, - getattr(node, 'lineno', None)) + self._string += '\nloop in tree: %r (%s)' % ( + node, getattr(node, 'lineno', None)) return _done.add(node) self._string += '\n' + indent + '<%s>' % node.__class__.__name__ @@ -197,7 +200,7 @@ def _native_repr_tree(self, node, indent, _done=None): continue if a in ("lineno", "col_offset") and not self.lineno: continue - self._string +='\n' + indent + a + " = " + repr(attr) + self._string += '\n' + indent + a + " = " + repr(attr) for field in node._fields or (): attr = node_dict[field] if attr is None: diff --git a/pymode/libs/easy_install.py b/pymode/libs/easy_install.py new file mode 100644 index 00000000..d87e9840 --- /dev/null +++ b/pymode/libs/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from 
setuptools.command.easy_install import main + main() diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__init__.py b/pymode/libs/logilab/common/__init__.py similarity index 92% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/__init__.py rename to pymode/libs/logilab/common/__init__.py index 8d063e2c..fc01e4df 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__init__.py +++ b/pymode/libs/logilab/common/__init__.py @@ -25,7 +25,18 @@ :var IGNORED_EXTENSIONS: file extensions that may usually be ignored """ __docformat__ = "restructuredtext en" -from logilab.common.__pkginfo__ import version as __version__ + +import sys +import types +import pkg_resources + +__version__ = pkg_resources.get_distribution('logilab-common').version + +# deprecated, but keep compatibility with pylint < 1.4.4 +__pkginfo__ = types.ModuleType('__pkginfo__') +__pkginfo__.__package__ = __name__ +__pkginfo__.version = __version__ +sys.modules['logilab.common.__pkginfo__'] = __pkginfo__ STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build') @@ -57,8 +68,9 @@ def __getitem__(self, attr): class nullobject(object): def __repr__(self): return '' - def __nonzero__(self): + def __bool__(self): return False + __nonzero__ = __bool__ class tempattr(object): def __init__(self, obj, attr, value): @@ -138,6 +150,7 @@ def make_domains(lists): >>> make_domains(['a', 'b'], ['c','d', 'e']) [['a', 'b', 'a', 'b', 'a', 'b'], ['c', 'c', 'd', 'd', 'e', 'e']] """ + from six.moves import range domains = [] for iterable in lists: new_domain = iterable[:] diff --git a/pymode/libs/logilab/common/cache.py b/pymode/libs/logilab/common/cache.py new file mode 100644 index 00000000..11ed1370 --- /dev/null +++ b/pymode/libs/logilab/common/cache.py @@ -0,0 +1,114 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. 
+# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Cache module, with a least recently used algorithm for the management of the +deletion of entries. + + + + +""" +__docformat__ = "restructuredtext en" + +from threading import Lock + +from logilab.common.decorators import locked + +_marker = object() + +class Cache(dict): + """A dictionary like cache. + + inv: + len(self._usage) <= self.size + len(self.data) <= self.size + """ + + def __init__(self, size=100): + """ Warning : Cache.__init__() != dict.__init__(). + Constructor does not take any arguments beside size. 
+ """ + assert size >= 0, 'cache size must be >= 0 (0 meaning no caching)' + self.size = size + self._usage = [] + self._lock = Lock() + super(Cache, self).__init__() + + def _acquire(self): + self._lock.acquire() + + def _release(self): + self._lock.release() + + def _update_usage(self, key): + if not self._usage: + self._usage.append(key) + elif self._usage[-1] != key: + try: + self._usage.remove(key) + except ValueError: + # we are inserting a new key + # check the size of the dictionary + # and remove the oldest item in the cache + if self.size and len(self._usage) >= self.size: + super(Cache, self).__delitem__(self._usage[0]) + del self._usage[0] + self._usage.append(key) + else: + pass # key is already the most recently used key + + def __getitem__(self, key): + value = super(Cache, self).__getitem__(key) + self._update_usage(key) + return value + __getitem__ = locked(_acquire, _release)(__getitem__) + + def __setitem__(self, key, item): + # Just make sure that size > 0 before inserting a new item in the cache + if self.size > 0: + super(Cache, self).__setitem__(key, item) + self._update_usage(key) + __setitem__ = locked(_acquire, _release)(__setitem__) + + def __delitem__(self, key): + super(Cache, self).__delitem__(key) + self._usage.remove(key) + __delitem__ = locked(_acquire, _release)(__delitem__) + + def clear(self): + super(Cache, self).clear() + self._usage = [] + clear = locked(_acquire, _release)(clear) + + def pop(self, key, default=_marker): + if key in self: + self._usage.remove(key) + #if default is _marker: + # return super(Cache, self).pop(key) + return super(Cache, self).pop(key, default) + pop = locked(_acquire, _release)(pop) + + def popitem(self): + raise NotImplementedError() + + def setdefault(self, key, default=None): + raise NotImplementedError() + + def update(self, other): + raise NotImplementedError() + + diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/changelog.py b/pymode/libs/logilab/common/changelog.py 
similarity index 98% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/changelog.py rename to pymode/libs/logilab/common/changelog.py index 74f51241..2fff2ed6 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/changelog.py +++ b/pymode/libs/logilab/common/changelog.py @@ -49,6 +49,8 @@ import sys from stat import S_IWRITE +from six import string_types + BULLET = '*' SUBBULLET = '-' INDENT = ' ' * 4 @@ -64,7 +66,7 @@ class Version(tuple): correctly printing it as X.Y.Z """ def __new__(cls, versionstr): - if isinstance(versionstr, basestring): + if isinstance(versionstr, string_types): versionstr = versionstr.strip(' :') # XXX (syt) duh? parsed = cls.parse(versionstr) else: @@ -76,7 +78,7 @@ def parse(cls, versionstr): versionstr = versionstr.strip(' :') try: return [int(i) for i in versionstr.split('.')] - except ValueError, ex: + except ValueError as ex: raise ValueError("invalid literal for version '%s' (%s)"%(versionstr, ex)) def __str__(self): diff --git a/pymode/libs/logilab/common/clcommands.py b/pymode/libs/logilab/common/clcommands.py new file mode 100644 index 00000000..4778b99b --- /dev/null +++ b/pymode/libs/logilab/common/clcommands.py @@ -0,0 +1,334 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Helper functions to support command line tools providing more than +one command. + +e.g called as "tool command [options] args..." where and are +command'specific +""" + +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import logging +from os.path import basename + +from logilab.common.configuration import Configuration +from logilab.common.logging_ext import init_log, get_threshold +from logilab.common.deprecation import deprecated + + +class BadCommandUsage(Exception): + """Raised when an unknown command is used or when a command is not + correctly used (bad options, too much / missing arguments...). + + Trigger display of command usage. + """ + +class CommandError(Exception): + """Raised when a command can't be processed and we want to display it and + exit, without traceback nor usage displayed. + """ + + +# command line access point #################################################### + +class CommandLine(dict): + """Usage: + + >>> LDI = cli.CommandLine('ldi', doc='Logilab debian installer', + version=version, rcfile=RCFILE) + >>> LDI.register(MyCommandClass) + >>> LDI.register(MyOtherCommandClass) + >>> LDI.run(sys.argv[1:]) + + Arguments: + + * `pgm`, the program name, default to `basename(sys.argv[0])` + + * `doc`, a short description of the command line tool + + * `copyright`, additional doc string that will be appended to the generated + doc + + * `version`, version number of string of the tool. If specified, global + --version option will be available. + + * `rcfile`, path to a configuration file. If specified, global --C/--rc-file + option will be available? 
self.rcfile = rcfile + + * `logger`, logger to propagate to commands, default to + `logging.getLogger(self.pgm))` + """ + def __init__(self, pgm=None, doc=None, copyright=None, version=None, + rcfile=None, logthreshold=logging.ERROR, + check_duplicated_command=True): + if pgm is None: + pgm = basename(sys.argv[0]) + self.pgm = pgm + self.doc = doc + self.copyright = copyright + self.version = version + self.rcfile = rcfile + self.logger = None + self.logthreshold = logthreshold + self.check_duplicated_command = check_duplicated_command + + def register(self, cls, force=False): + """register the given :class:`Command` subclass""" + assert not self.check_duplicated_command or force or not cls.name in self, \ + 'a command %s is already defined' % cls.name + self[cls.name] = cls + return cls + + def run(self, args): + """main command line access point: + * init logging + * handle global options (-h/--help, --version, -C/--rc-file) + * check command + * run command + + Terminate by :exc:`SystemExit` + """ + init_log(debug=True, # so that we use StreamHandler + logthreshold=self.logthreshold, + logformat='%(levelname)s: %(message)s') + try: + arg = args.pop(0) + except IndexError: + self.usage_and_exit(1) + if arg in ('-h', '--help'): + self.usage_and_exit(0) + if self.version is not None and arg in ('--version'): + print(self.version) + sys.exit(0) + rcfile = self.rcfile + if rcfile is not None and arg in ('-C', '--rc-file'): + try: + rcfile = args.pop(0) + arg = args.pop(0) + except IndexError: + self.usage_and_exit(1) + try: + command = self.get_command(arg) + except KeyError: + print('ERROR: no %s command' % arg) + print() + self.usage_and_exit(1) + try: + sys.exit(command.main_run(args, rcfile)) + except KeyboardInterrupt as exc: + print('Interrupted', end=' ') + if str(exc): + print(': %s' % exc, end=' ') + print() + sys.exit(4) + except BadCommandUsage as err: + print('ERROR:', err) + print() + print(command.help()) + sys.exit(1) + + def create_logger(self, 
handler, logthreshold=None): + logger = logging.Logger(self.pgm) + logger.handlers = [handler] + if logthreshold is None: + logthreshold = get_threshold(self.logthreshold) + logger.setLevel(logthreshold) + return logger + + def get_command(self, cmd, logger=None): + if logger is None: + logger = self.logger + if logger is None: + logger = self.logger = logging.getLogger(self.pgm) + logger.setLevel(get_threshold(self.logthreshold)) + return self[cmd](logger) + + def usage(self): + """display usage for the main program (i.e. when no command supplied) + and exit + """ + print('usage:', self.pgm, end=' ') + if self.rcfile: + print('[--rc-file=]', end=' ') + print(' [options] ...') + if self.doc: + print('\n%s' % self.doc) + print(''' +Type "%(pgm)s --help" for more information about a specific +command. Available commands are :\n''' % self.__dict__) + max_len = max([len(cmd) for cmd in self]) + padding = ' ' * max_len + for cmdname, cmd in sorted(self.items()): + if not cmd.hidden: + print(' ', (cmdname + padding)[:max_len], cmd.short_description()) + if self.rcfile: + print(''' +Use --rc-file= / -C before the command +to specify a configuration file. Default to %s. +''' % self.rcfile) + print('''%(pgm)s -h/--help + display this usage information and exit''' % self.__dict__) + if self.version: + print('''%(pgm)s -v/--version + display version configuration and exit''' % self.__dict__) + if self.copyright: + print('\n', self.copyright) + + def usage_and_exit(self, status): + self.usage() + sys.exit(status) + + +# base command classes ######################################################### + +class Command(Configuration): + """Base class for command line commands. 
+ + Class attributes: + + * `name`, the name of the command + + * `min_args`, minimum number of arguments, None if unspecified + + * `max_args`, maximum number of arguments, None if unspecified + + * `arguments`, string describing arguments, used in command usage + + * `hidden`, boolean flag telling if the command should be hidden, e.g. does + not appear in help's commands list + + * `options`, options list, as allowed by :mod:configuration + """ + + arguments = '' + name = '' + # hidden from help ? + hidden = False + # max/min args, None meaning unspecified + min_args = None + max_args = None + + @classmethod + def description(cls): + return cls.__doc__.replace(' ', '') + + @classmethod + def short_description(cls): + return cls.description().split('.')[0] + + def __init__(self, logger): + usage = '%%prog %s %s\n\n%s' % (self.name, self.arguments, + self.description()) + Configuration.__init__(self, usage=usage) + self.logger = logger + + def check_args(self, args): + """check command's arguments are provided""" + if self.min_args is not None and len(args) < self.min_args: + raise BadCommandUsage('missing argument') + if self.max_args is not None and len(args) > self.max_args: + raise BadCommandUsage('too many arguments') + + def main_run(self, args, rcfile=None): + """Run the command and return status 0 if everything went fine. + + If :exc:`CommandError` is raised by the underlying command, simply log + the error and return status 2. + + Any other exceptions, including :exc:`BadCommandUsage` will be + propagated. 
+ """ + if rcfile: + self.load_file_configuration(rcfile) + args = self.load_command_line_configuration(args) + try: + self.check_args(args) + self.run(args) + except CommandError as err: + self.logger.error(err) + return 2 + return 0 + + def run(self, args): + """run the command with its specific arguments""" + raise NotImplementedError() + + +class ListCommandsCommand(Command): + """list available commands, useful for bash completion.""" + name = 'listcommands' + arguments = '[command]' + hidden = True + + def run(self, args): + """run the command with its specific arguments""" + if args: + command = args.pop() + cmd = _COMMANDS[command] + for optname, optdict in cmd.options: + print('--help') + print('--' + optname) + else: + commands = sorted(_COMMANDS.keys()) + for command in commands: + cmd = _COMMANDS[command] + if not cmd.hidden: + print(command) + + +# deprecated stuff ############################################################# + +_COMMANDS = CommandLine() + +DEFAULT_COPYRIGHT = '''\ +Copyright (c) 2004-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +http://www.logilab.fr/ -- mailto:contact@logilab.fr''' + +@deprecated('use cls.register(cli)') +def register_commands(commands): + """register existing commands""" + for command_klass in commands: + _COMMANDS.register(command_klass) + +@deprecated('use args.pop(0)') +def main_run(args, doc=None, copyright=None, version=None): + """command line tool: run command specified by argument list (without the + program name). Raise SystemExit with status 0 if everything went fine. 
+ + >>> main_run(sys.argv[1:]) + """ + _COMMANDS.doc = doc + _COMMANDS.copyright = copyright + _COMMANDS.version = version + _COMMANDS.run(args) + +@deprecated('use args.pop(0)') +def pop_arg(args_list, expected_size_after=None, msg="Missing argument"): + """helper function to get and check command line arguments""" + try: + value = args_list.pop(0) + except IndexError: + raise BadCommandUsage(msg) + if expected_size_after is not None and len(args_list) > expected_size_after: + raise BadCommandUsage('too many arguments') + return value + diff --git a/pymode/libs/logilab/common/compat.py b/pymode/libs/logilab/common/compat.py new file mode 100644 index 00000000..f2eb5905 --- /dev/null +++ b/pymode/libs/logilab/common/compat.py @@ -0,0 +1,78 @@ +# pylint: disable=E0601,W0622,W0611 +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Wrappers around some builtins introduced in python 2.3, 2.4 and +2.5, making them available in for earlier versions of python. 
+ +See another compatibility snippets from other projects: + + :mod:`lib2to3.fixes` + :mod:`coverage.backward` + :mod:`unittest2.compatibility` +""" + + +__docformat__ = "restructuredtext en" + +import os +import sys +import types +from warnings import warn + +# not used here, but imported to preserve API +from six.moves import builtins + +if sys.version_info < (3, 0): + str_to_bytes = str + def str_encode(string, encoding): + if isinstance(string, unicode): + return string.encode(encoding) + return str(string) +else: + def str_to_bytes(string): + return str.encode(string) + # we have to ignore the encoding in py3k to be able to write a string into a + # TextIOWrapper or like object (which expect an unicode string) + def str_encode(string, encoding): + return str(string) + +# See also http://bugs.python.org/issue11776 +if sys.version_info[0] == 3: + def method_type(callable, instance, klass): + # api change. klass is no more considered + return types.MethodType(callable, instance) +else: + # alias types otherwise + method_type = types.MethodType + +# Pythons 2 and 3 differ on where to get StringIO +if sys.version_info < (3, 0): + from cStringIO import StringIO + FileIO = file + BytesIO = StringIO + reload = reload +else: + from io import FileIO, BytesIO, StringIO + from imp import reload + +from logilab.common.deprecation import deprecated + +# Other projects import these from here, keep providing them for +# backwards compat +any = deprecated('use builtin "any"')(any) +all = deprecated('use builtin "all"')(all) diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/configuration.py b/pymode/libs/logilab/common/configuration.py similarity index 94% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/configuration.py rename to pymode/libs/logilab/common/configuration.py index fa93a056..b2924277 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/configuration.py +++ b/pymode/libs/logilab/common/configuration.py @@ -96,8 +96,19 
@@ multiple=4,5,6 number=3 - >>> + + Note : starting with Python 2.7 ConfigParser is able to take into + account the order of occurrences of the options into a file (by + using an OrderedDict). If you have two options changing some common + state, like a 'disable-all-stuff' and a 'enable-some-stuff-a', their + order of appearance will be significant : the last specified in the + file wins. For earlier version of python and logilab.common newer + than 0.61 the behaviour is unspecified. + """ + +from __future__ import print_function + __docformat__ = "restructuredtext en" __all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn', @@ -109,11 +120,12 @@ import re from os.path import exists, expanduser from copy import copy -from ConfigParser import ConfigParser, NoOptionError, NoSectionError, \ - DuplicateSectionError from warnings import warn -from logilab.common.compat import callable, raw_input, str_encode as _encode +from six import string_types +from six.moves import range, configparser as cp, input + +from logilab.common.compat import str_encode as _encode from logilab.common.deprecation import deprecated from logilab.common.textutils import normalize_text, unquote from logilab.common import optik_ext @@ -244,23 +256,23 @@ def input_password(optdict, question='password:'): value2 = getpass('confirm: ') if value == value2: return value - print 'password mismatch, try again' + print('password mismatch, try again') def input_string(optdict, question): - value = raw_input(question).strip() + value = input(question).strip() return value or None def _make_input_function(opttype): def input_validator(optdict, question): while True: - value = raw_input(question) + value = input(question) if not value.strip(): return None try: return _call_validator(opttype, optdict, None, value) - except optik_ext.OptionValueError, ex: + except optik_ext.OptionValueError as ex: msg = str(ex).split(':', 1)[-1].strip() - print 'bad value: %s' % msg + print('bad value: %s' % msg) return 
input_validator INPUT_FUNCTIONS = { @@ -358,7 +370,7 @@ def format_option_value(optdict, value): value = value.pattern elif optdict.get('type') == 'yn': value = value and 'yes' or 'no' - elif isinstance(value, (str, unicode)) and value.isspace(): + elif isinstance(value, string_types) and value.isspace(): value = "'%s'" % value elif optdict.get('type') == 'time' and isinstance(value, (float, int, long)): value = format_time(value) @@ -370,8 +382,8 @@ def ini_format_section(stream, section, options, encoding=None, doc=None): """format an options section using the INI format""" encoding = _get_encoding(encoding, stream) if doc: - print >> stream, _encode(comment(doc), encoding) - print >> stream, '[%s]' % section + print(_encode(comment(doc), encoding), file=stream) + print('[%s]' % section, file=stream) ini_format(stream, options, encoding) def ini_format(stream, options, encoding): @@ -381,37 +393,36 @@ def ini_format(stream, options, encoding): help = optdict.get('help') if help: help = normalize_text(help, line_len=79, indent='# ') - print >> stream - print >> stream, _encode(help, encoding) + print(file=stream) + print(_encode(help, encoding), file=stream) else: - print >> stream + print(file=stream) if value is None: - print >> stream, '#%s=' % optname + print('#%s=' % optname, file=stream) else: value = _encode(value, encoding).strip() - print >> stream, '%s=%s' % (optname, value) + print('%s=%s' % (optname, value), file=stream) format_section = ini_format_section def rest_format_section(stream, section, options, encoding=None, doc=None): - """format an options section using the INI format""" + """format an options section using as ReST formatted output""" encoding = _get_encoding(encoding, stream) if section: - print >> stream, '%s\n%s' % (section, "'"*len(section)) + print('%s\n%s' % (section, "'"*len(section)), file=stream) if doc: - print >> stream, _encode(normalize_text(doc, line_len=79, indent=''), - encoding) - print >> stream + 
print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream) + print(file=stream) for optname, optdict, value in options: help = optdict.get('help') - print >> stream, ':%s:' % optname + print(':%s:' % optname, file=stream) if help: help = normalize_text(help, line_len=79, indent=' ') - print >> stream, _encode(help, encoding) + print(_encode(help, encoding), file=stream) if value: value = _encode(format_option_value(optdict, value), encoding) - print >> stream, '' - print >> stream, ' Default: ``%s``' % value.replace("`` ", "```` ``") + print(file=stream) + print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream) # Options Manager ############################################################## @@ -436,7 +447,7 @@ def __init__(self, usage, config_file=None, version=None, quiet=0): def reset_parsers(self, usage='', version=None): # configuration file parser - self.cfgfile_parser = ConfigParser() + self.cfgfile_parser = cp.ConfigParser() # command line parser self.cmdline_parser = optik_ext.OptionParser(usage=usage, version=version) self.cmdline_parser.options_manager = self @@ -521,7 +532,7 @@ def optik_option(self, provider, opt, optdict): args.append('-' + optdict['short']) del optdict['short'] # cleanup option definition dict before giving it to optik - for key in optdict.keys(): + for key in list(optdict.keys()): if not key in self._optik_option_attrs: optdict.pop(key) return args, optdict @@ -568,7 +579,7 @@ def generate_config(self, stream=None, skipsections=(), encoding=None): printed = False for section in sections: if printed: - print >> stream, '\n' + print('\n', file=stream) format_section(stream, section.upper(), options_by_section[section], encoding) printed = True @@ -607,7 +618,7 @@ def read_config_file(self, config_file=None): if opt in self._all_options: break # already processed def helpfunc(option, opt, val, p, level=helplevel): - print self.help(level) + print(self.help(level)) sys.exit(0) helpmsg = '%s 
verbose help.' % ' '.join(['more'] * helplevel) optdict = {'action' : 'callback', 'callback' : helpfunc, @@ -629,7 +640,7 @@ def helpfunc(option, opt, val, p, level=helplevel): parser._sections[sect.upper()] = values elif not self.quiet: msg = 'No config file found, using default configuration' - print >> sys.stderr, msg + print(msg, file=sys.stderr) return def input_config(self, onlysection=None, inputlevel=0, stream=None): @@ -655,13 +666,13 @@ def load_config_file(self): options provider) """ parser = self.cfgfile_parser - for provider in self.options_providers: - for section, option, optdict in provider.all_options(): - try: - value = parser.get(section, option) - provider.set_option(option, value, optdict=optdict) - except (NoSectionError, NoOptionError), ex: - continue + for section in parser.sections(): + for option, value in parser.items(section): + try: + self.global_set_option(option, value) + except (KeyError, OptionError): + # TODO handle here undeclared options appearing in the config file + continue def load_configuration(self, **kwargs): """override configuration according to given parameters @@ -853,12 +864,12 @@ def input_option(self, option, optdict, inputlevel=99): defaultstr = ': ' else: defaultstr = '(default: %s): ' % format_option_value(optdict, default) - print ':%s:' % option - print optdict.get('help') or option + print(':%s:' % option) + print(optdict.get('help') or option) inputfunc = INPUT_FUNCTIONS[optdict['type']] value = inputfunc(optdict, defaultstr) while default is REQUIRED and not value: - print 'please specify a value' + print('please specify a value') value = inputfunc(optdict, '%s: ' % option) if value is None and default is not None: value = default diff --git a/pymode/libs/logilab/common/daemon.py b/pymode/libs/logilab/common/daemon.py new file mode 100644 index 00000000..40319a43 --- /dev/null +++ b/pymode/libs/logilab/common/daemon.py @@ -0,0 +1,101 @@ +# copyright 2003-2011 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""A daemonize function (for Unices)""" + +__docformat__ = "restructuredtext en" + +import os +import errno +import signal +import sys +import time +import warnings + +from six.moves import range + +def setugid(user): + """Change process user and group ID + + Argument is a numeric user id or a user name""" + try: + from pwd import getpwuid + passwd = getpwuid(int(user)) + except ValueError: + from pwd import getpwnam + passwd = getpwnam(user) + + if hasattr(os, 'initgroups'): # python >= 2.7 + os.initgroups(passwd.pw_name, passwd.pw_gid) + else: + import ctypes + if ctypes.CDLL(None).initgroups(passwd.pw_name, passwd.pw_gid) < 0: + err = ctypes.c_int.in_dll(ctypes.pythonapi,"errno").value + raise OSError(err, os.strerror(err), 'initgroups') + os.setgid(passwd.pw_gid) + os.setuid(passwd.pw_uid) + os.environ['HOME'] = passwd.pw_dir + + +def daemonize(pidfile=None, uid=None, umask=0o77): + """daemonize a Unix process. Set paranoid umask by default. + + Return 1 in the original process, 2 in the first fork, and None for the + second fork (eg daemon process). + """ + # http://www.faqs.org/faqs/unix-faq/programmer/faq/ + # + # fork so the parent can exit + if os.fork(): # launch child and... 
+ return 1 + # disconnect from tty and create a new session + os.setsid() + # fork again so the parent, (the session group leader), can exit. + # as a non-session group leader, we can never regain a controlling + # terminal. + if os.fork(): # launch child again. + return 2 + # move to the root to avoit mount pb + os.chdir('/') + # redirect standard descriptors + null = os.open('/dev/null', os.O_RDWR) + for i in range(3): + try: + os.dup2(null, i) + except OSError as e: + if e.errno != errno.EBADF: + raise + os.close(null) + # filter warnings + warnings.filterwarnings('ignore') + # write pid in a file + if pidfile: + # ensure the directory where the pid-file should be set exists (for + # instance /var/run/cubicweb may be deleted on computer restart) + piddir = os.path.dirname(pidfile) + if not os.path.exists(piddir): + os.makedirs(piddir) + f = file(pidfile, 'w') + f.write(str(os.getpid())) + f.close() + # set umask if specified + if umask is not None: + os.umask(umask) + # change process uid + if uid: + setugid(uid) + return None diff --git a/pymode/libs/logilab/common/date.py b/pymode/libs/logilab/common/date.py new file mode 100644 index 00000000..a093a8a9 --- /dev/null +++ b/pymode/libs/logilab/common/date.py @@ -0,0 +1,335 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Date manipulation helper functions.""" +from __future__ import division + +__docformat__ = "restructuredtext en" + +import math +import re +import sys +from locale import getlocale, LC_TIME +from datetime import date, time, datetime, timedelta +from time import strptime as time_strptime +from calendar import monthrange, timegm + +from six.moves import range + +try: + from mx.DateTime import RelativeDateTime, Date, DateTimeType +except ImportError: + endOfMonth = None + DateTimeType = datetime +else: + endOfMonth = RelativeDateTime(months=1, day=-1) + +# NOTE: should we implement a compatibility layer between date representations +# as we have in lgc.db ? + +FRENCH_FIXED_HOLIDAYS = { + 'jour_an': '%s-01-01', + 'fete_travail': '%s-05-01', + 'armistice1945': '%s-05-08', + 'fete_nat': '%s-07-14', + 'assomption': '%s-08-15', + 'toussaint': '%s-11-01', + 'armistice1918': '%s-11-11', + 'noel': '%s-12-25', + } + +FRENCH_MOBILE_HOLIDAYS = { + 'paques2004': '2004-04-12', + 'ascension2004': '2004-05-20', + 'pentecote2004': '2004-05-31', + + 'paques2005': '2005-03-28', + 'ascension2005': '2005-05-05', + 'pentecote2005': '2005-05-16', + + 'paques2006': '2006-04-17', + 'ascension2006': '2006-05-25', + 'pentecote2006': '2006-06-05', + + 'paques2007': '2007-04-09', + 'ascension2007': '2007-05-17', + 'pentecote2007': '2007-05-28', + + 'paques2008': '2008-03-24', + 'ascension2008': '2008-05-01', + 'pentecote2008': '2008-05-12', + + 'paques2009': '2009-04-13', + 'ascension2009': '2009-05-21', + 'pentecote2009': '2009-06-01', + + 'paques2010': '2010-04-05', + 'ascension2010': '2010-05-13', + 'pentecote2010': '2010-05-24', + + 'paques2011': '2011-04-25', + 'ascension2011': '2011-06-02', + 'pentecote2011': '2011-06-13', + + 'paques2012': '2012-04-09', + 'ascension2012': '2012-05-17', + 'pentecote2012': '2012-05-28', + } + +# XXX this implementation cries for 
multimethod dispatching + +def get_step(dateobj, nbdays=1): + # assume date is either a python datetime or a mx.DateTime object + if isinstance(dateobj, date): + return ONEDAY * nbdays + return nbdays # mx.DateTime is ok with integers + +def datefactory(year, month, day, sampledate): + # assume date is either a python datetime or a mx.DateTime object + if isinstance(sampledate, datetime): + return datetime(year, month, day) + if isinstance(sampledate, date): + return date(year, month, day) + return Date(year, month, day) + +def weekday(dateobj): + # assume date is either a python datetime or a mx.DateTime object + if isinstance(dateobj, date): + return dateobj.weekday() + return dateobj.day_of_week + +def str2date(datestr, sampledate): + # NOTE: datetime.strptime is not an option until we drop py2.4 compat + year, month, day = [int(chunk) for chunk in datestr.split('-')] + return datefactory(year, month, day, sampledate) + +def days_between(start, end): + if isinstance(start, date): + delta = end - start + # datetime.timedelta.days is always an integer (floored) + if delta.seconds: + return delta.days + 1 + return delta.days + else: + return int(math.ceil((end - start).days)) + +def get_national_holidays(begin, end): + """return french national days off between begin and end""" + begin = datefactory(begin.year, begin.month, begin.day, begin) + end = datefactory(end.year, end.month, end.day, end) + holidays = [str2date(datestr, begin) + for datestr in FRENCH_MOBILE_HOLIDAYS.values()] + for year in range(begin.year, end.year+1): + for datestr in FRENCH_FIXED_HOLIDAYS.values(): + date = str2date(datestr % year, begin) + if date not in holidays: + holidays.append(date) + return [day for day in holidays if begin <= day < end] + +def add_days_worked(start, days): + """adds date but try to only take days worked into account""" + step = get_step(start) + weeks, plus = divmod(days, 5) + end = start + ((weeks * 7) + plus) * step + if weekday(end) >= 5: # saturday or sunday + 
end += (2 * step) + end += len([x for x in get_national_holidays(start, end + step) + if weekday(x) < 5]) * step + if weekday(end) >= 5: # saturday or sunday + end += (2 * step) + return end + +def nb_open_days(start, end): + assert start <= end + step = get_step(start) + days = days_between(start, end) + weeks, plus = divmod(days, 7) + if weekday(start) > weekday(end): + plus -= 2 + elif weekday(end) == 6: + plus -= 1 + open_days = weeks * 5 + plus + nb_week_holidays = len([x for x in get_national_holidays(start, end+step) + if weekday(x) < 5 and x < end]) + open_days -= nb_week_holidays + if open_days < 0: + return 0 + return open_days + +def date_range(begin, end, incday=None, incmonth=None): + """yields each date between begin and end + + :param begin: the start date + :param end: the end date + :param incr: the step to use to iterate over dates. Default is + one day. + :param include: None (means no exclusion) or a function taking a + date as parameter, and returning True if the date + should be included. 
+ + When using mx datetime, you should *NOT* use incmonth argument, use instead + oneDay, oneHour, oneMinute, oneSecond, oneWeek or endOfMonth (to enumerate + months) as `incday` argument + """ + assert not (incday and incmonth) + begin = todate(begin) + end = todate(end) + if incmonth: + while begin < end: + yield begin + begin = next_month(begin, incmonth) + else: + incr = get_step(begin, incday or 1) + while begin < end: + yield begin + begin += incr + +# makes py datetime usable ##################################################### + +ONEDAY = timedelta(days=1) +ONEWEEK = timedelta(days=7) + +try: + strptime = datetime.strptime +except AttributeError: # py < 2.5 + from time import strptime as time_strptime + def strptime(value, format): + return datetime(*time_strptime(value, format)[:6]) + +def strptime_time(value, format='%H:%M'): + return time(*time_strptime(value, format)[3:6]) + +def todate(somedate): + """return a date from a date (leaving unchanged) or a datetime""" + if isinstance(somedate, datetime): + return date(somedate.year, somedate.month, somedate.day) + assert isinstance(somedate, (date, DateTimeType)), repr(somedate) + return somedate + +def totime(somedate): + """return a time from a time (leaving unchanged), date or datetime""" + # XXX mx compat + if not isinstance(somedate, time): + return time(somedate.hour, somedate.minute, somedate.second) + assert isinstance(somedate, (time)), repr(somedate) + return somedate + +def todatetime(somedate): + """return a date from a date (leaving unchanged) or a datetime""" + # take care, datetime is a subclass of date + if isinstance(somedate, datetime): + return somedate + assert isinstance(somedate, (date, DateTimeType)), repr(somedate) + return datetime(somedate.year, somedate.month, somedate.day) + +def datetime2ticks(somedate): + return timegm(somedate.timetuple()) * 1000 + +def ticks2datetime(ticks): + miliseconds, microseconds = divmod(ticks, 1000) + try: + return datetime.fromtimestamp(miliseconds) 
+ except (ValueError, OverflowError): + epoch = datetime.fromtimestamp(0) + nb_days, seconds = divmod(int(miliseconds), 86400) + delta = timedelta(nb_days, seconds=seconds, microseconds=microseconds) + try: + return epoch + delta + except (ValueError, OverflowError): + raise + +def days_in_month(somedate): + return monthrange(somedate.year, somedate.month)[1] + +def days_in_year(somedate): + feb = date(somedate.year, 2, 1) + if days_in_month(feb) == 29: + return 366 + else: + return 365 + +def previous_month(somedate, nbmonth=1): + while nbmonth: + somedate = first_day(somedate) - ONEDAY + nbmonth -= 1 + return somedate + +def next_month(somedate, nbmonth=1): + while nbmonth: + somedate = last_day(somedate) + ONEDAY + nbmonth -= 1 + return somedate + +def first_day(somedate): + return date(somedate.year, somedate.month, 1) + +def last_day(somedate): + return date(somedate.year, somedate.month, days_in_month(somedate)) + +def ustrftime(somedate, fmt='%Y-%m-%d'): + """like strftime, but returns a unicode string instead of an encoded + string which may be problematic with localized date. + """ + if sys.version_info >= (3, 3): + # datetime.date.strftime() supports dates since year 1 in Python >=3.3. 
+ return somedate.strftime(fmt) + else: + try: + if sys.version_info < (3, 0): + encoding = getlocale(LC_TIME)[1] or 'ascii' + return unicode(somedate.strftime(str(fmt)), encoding) + else: + return somedate.strftime(fmt) + except ValueError: + if somedate.year >= 1900: + raise + # datetime is not happy with dates before 1900 + # we try to work around this, assuming a simple + # format string + fields = {'Y': somedate.year, + 'm': somedate.month, + 'd': somedate.day, + } + if isinstance(somedate, datetime): + fields.update({'H': somedate.hour, + 'M': somedate.minute, + 'S': somedate.second}) + fmt = re.sub('%([YmdHMS])', r'%(\1)02d', fmt) + return unicode(fmt) % fields + +def utcdatetime(dt): + if dt.tzinfo is None: + return dt + return (dt.replace(tzinfo=None) - dt.utcoffset()) + +def utctime(dt): + if dt.tzinfo is None: + return dt + return (dt + dt.utcoffset() + dt.dst()).replace(tzinfo=None) + +def datetime_to_seconds(date): + """return the number of seconds since the begining of the day for that date + """ + return date.second+60*date.minute + 3600*date.hour + +def timedelta_to_days(delta): + """return the time delta as a number of seconds""" + return delta.days + delta.seconds / (3600*24) + +def timedelta_to_seconds(delta): + """return the time delta as a fraction of days""" + return delta.days*(3600*24) + delta.seconds diff --git a/pymode/libs/logilab/common/debugger.py b/pymode/libs/logilab/common/debugger.py new file mode 100644 index 00000000..1f540a18 --- /dev/null +++ b/pymode/libs/logilab/common/debugger.py @@ -0,0 +1,214 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. 
+# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Customized version of pdb's default debugger. + +- sets up a history file +- uses ipython if available to colorize lines of code +- overrides list command to search for current block instead + of using 5 lines of context + + + + +""" + +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +try: + import readline +except ImportError: + readline = None +import os +import os.path as osp +import sys +from pdb import Pdb +import inspect + +from logilab.common.compat import StringIO + +try: + from IPython import PyColorize +except ImportError: + def colorize(source, *args): + """fallback colorize function""" + return source + def colorize_source(source, *args): + return source +else: + def colorize(source, start_lineno, curlineno): + """colorize and annotate source with linenos + (as in pdb's list command) + """ + parser = PyColorize.Parser() + output = StringIO() + parser.format(source, output) + annotated = [] + for index, line in enumerate(output.getvalue().splitlines()): + lineno = index + start_lineno + if lineno == curlineno: + annotated.append('%4s\t->\t%s' % (lineno, line)) + else: + annotated.append('%4s\t\t%s' % (lineno, line)) + return '\n'.join(annotated) + + def colorize_source(source): + """colorize given source""" + parser = PyColorize.Parser() + output = StringIO() + parser.format(source, 
output) + return output.getvalue() + + +def getsource(obj): + """Return the text of the source code for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a single string. An + IOError is raised if the source code cannot be retrieved.""" + lines, lnum = inspect.getsourcelines(obj) + return ''.join(lines), lnum + + +################################################################ +class Debugger(Pdb): + """custom debugger + + - sets up a history file + - uses ipython if available to colorize lines of code + - overrides list command to search for current block instead + of using 5 lines of context + """ + def __init__(self, tcbk=None): + Pdb.__init__(self) + self.reset() + if tcbk: + while tcbk.tb_next is not None: + tcbk = tcbk.tb_next + self._tcbk = tcbk + self._histfile = os.path.expanduser("~/.pdbhist") + + def setup_history_file(self): + """if readline is available, read pdb history file + """ + if readline is not None: + try: + # XXX try..except shouldn't be necessary + # read_history_file() can accept None + readline.read_history_file(self._histfile) + except IOError: + pass + + def start(self): + """starts the interactive mode""" + self.interaction(self._tcbk.tb_frame, self._tcbk) + + def setup(self, frame, tcbk): + """setup hook: set up history file""" + self.setup_history_file() + Pdb.setup(self, frame, tcbk) + + def set_quit(self): + """quit hook: save commands in the history file""" + if readline is not None: + readline.write_history_file(self._histfile) + Pdb.set_quit(self) + + def complete_p(self, text, line, begin_idx, end_idx): + """provide variable names completion for the ``p`` command""" + namespace = dict(self.curframe.f_globals) + namespace.update(self.curframe.f_locals) + if '.' 
in text: + return self.attr_matches(text, namespace) + return [varname for varname in namespace if varname.startswith(text)] + + + def attr_matches(self, text, namespace): + """implementation coming from rlcompleter.Completer.attr_matches + Compute matches when text contains a dot. + + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluatable in self.namespace, it will be evaluated and its attributes + (as revealed by dir()) are used as possible completions. (For class + instances, class members are also considered.) + + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. + + """ + import re + m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text) + if not m: + return + expr, attr = m.group(1, 3) + object = eval(expr, namespace) + words = dir(object) + if hasattr(object, '__class__'): + words.append('__class__') + words = words + self.get_class_members(object.__class__) + matches = [] + n = len(attr) + for word in words: + if word[:n] == attr and word != "__builtins__": + matches.append("%s.%s" % (expr, word)) + return matches + + def get_class_members(self, klass): + """implementation coming from rlcompleter.get_class_members""" + ret = dir(klass) + if hasattr(klass, '__bases__'): + for base in klass.__bases__: + ret = ret + self.get_class_members(base) + return ret + + ## specific / overridden commands + def do_list(self, arg): + """overrides default list command to display the surrounding block + instead of 5 lines of context + """ + self.lastcmd = 'list' + if not arg: + try: + source, start_lineno = getsource(self.curframe) + print(colorize(''.join(source), start_lineno, + self.curframe.f_lineno)) + except KeyboardInterrupt: + pass + except IOError: + Pdb.do_list(self, arg) + else: + Pdb.do_list(self, arg) + do_l = do_list + + def do_open(self, arg): + """opens source file corresponding to the current stack level""" + filename = self.curframe.f_code.co_filename + lineno = self.curframe.f_lineno + cmd = 
'emacsclient --no-wait +%s %s' % (lineno, filename) + os.system(cmd) + + do_o = do_open + +def pm(): + """use our custom debugger""" + dbg = Debugger(sys.last_traceback) + dbg.start() + +def set_trace(): + Debugger().set_trace(sys._getframe().f_back) diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/decorators.py b/pymode/libs/logilab/common/decorators.py similarity index 95% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/decorators.py rename to pymode/libs/logilab/common/decorators.py index 34bbd3a9..beafa202 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/decorators.py +++ b/pymode/libs/logilab/common/decorators.py @@ -16,27 +16,28 @@ # You should have received a copy of the GNU Lesser General Public License along # with logilab-common. If not, see . """ A few useful function/method decorators. """ + +from __future__ import print_function + __docformat__ = "restructuredtext en" import sys import types from time import clock, time +from inspect import isgeneratorfunction, getargspec -from logilab.common.compat import callable, method_type +from logilab.common.compat import method_type # XXX rewrite so we can use the decorator syntax when keyarg has to be specified -def _is_generator_function(callableobj): - return callableobj.func_code.co_flags & 0x20 - class cached_decorator(object): def __init__(self, cacheattr=None, keyarg=None): self.cacheattr = cacheattr self.keyarg = keyarg def __call__(self, callableobj=None): - assert not _is_generator_function(callableobj), \ + assert not isgeneratorfunction(callableobj), \ 'cannot cache generator function: %s' % callableobj - if callableobj.func_code.co_argcount == 1 or self.keyarg == 0: + if len(getargspec(callableobj).args) == 1 or self.keyarg == 0: cache = _SingleValueCache(callableobj, self.cacheattr) elif self.keyarg: cache = _MultiValuesKeyArgCache(callableobj, self.keyarg, self.cacheattr) @@ -68,7 +69,6 @@ def wrapped(*args, **kwargs): try: wrapped.__doc__ = 
self.callable.__doc__ wrapped.__name__ = self.callable.__name__ - wrapped.func_name = self.callable.func_name except: pass return wrapped @@ -227,8 +227,8 @@ def wrap(*args, **kwargs): t = time() c = clock() res = f(*args, **kwargs) - print '%s clock: %.9f / time: %.9f' % (f.__name__, - clock() - c, time() - t) + print('%s clock: %.9f / time: %.9f' % (f.__name__, + clock() - c, time() - t)) return res return wrap diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py b/pymode/libs/logilab/common/deprecation.py similarity index 98% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py rename to pymode/libs/logilab/common/deprecation.py index 02e4edbb..1c81b638 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/deprecation.py +++ b/pymode/libs/logilab/common/deprecation.py @@ -78,7 +78,7 @@ def deprecated(self, version=None, reason=None, stacklevel=2, name=None, doc=Non def decorator(func): message = reason or 'The function "%s" is deprecated' if '%s' in message: - message %= func.func_name + message %= func.__name__ def wrapped(*args, **kwargs): self.warn(version, message, stacklevel+1) return func(*args, **kwargs) @@ -125,11 +125,12 @@ def class_renamed(self, version, old_name, new_class, message=None): return self.class_deprecated(version)(old_name, (new_class,), clsdict) except (NameError, TypeError): # old-style class + warn = self.warn class DeprecatedClass(new_class): """FIXME: There might be a better way to handle old/new-style class """ def __init__(self, *args, **kwargs): - self.warn(version, message, stacklevel=3) + warn(version, message, stacklevel=3) new_class.__init__(self, *args, **kwargs) return DeprecatedClass diff --git a/pymode/libs/logilab/common/fileutils.py b/pymode/libs/logilab/common/fileutils.py new file mode 100644 index 00000000..b30cf5f8 --- /dev/null +++ b/pymode/libs/logilab/common/fileutils.py @@ -0,0 +1,404 @@ +# copyright 2003-2011 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""File and file-path manipulation utilities. + +:group path manipulation: first_level_directory, relative_path, is_binary,\ +get_by_ext, remove_dead_links +:group file manipulation: norm_read, norm_open, lines, stream_lines, lines,\ +write_open_mode, ensure_fs_mode, export +:sort: path manipulation, file manipulation +""" + +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import shutil +import mimetypes +from os.path import isabs, isdir, islink, split, exists, normpath, join +from os.path import abspath +from os import sep, mkdir, remove, listdir, stat, chmod, walk +from stat import ST_MODE, S_IWRITE + +from logilab.common import STD_BLACKLIST as BASE_BLACKLIST, IGNORED_EXTENSIONS +from logilab.common.shellutils import find +from logilab.common.deprecation import deprecated +from logilab.common.compat import FileIO + +def first_level_directory(path): + """Return the first level directory of a path. 
+ + >>> first_level_directory('home/syt/work') + 'home' + >>> first_level_directory('/home/syt/work') + '/' + >>> first_level_directory('work') + 'work' + >>> + + :type path: str + :param path: the path for which we want the first level directory + + :rtype: str + :return: the first level directory appearing in `path` + """ + head, tail = split(path) + while head and tail: + head, tail = split(head) + if tail: + return tail + # path was absolute, head is the fs root + return head + +def abspath_listdir(path): + """Lists path's content using absolute paths. + + >>> os.listdir('/home') + ['adim', 'alf', 'arthur', 'auc'] + >>> abspath_listdir('/home') + ['/home/adim', '/home/alf', '/home/arthur', '/home/auc'] + """ + path = abspath(path) + return [join(path, filename) for filename in listdir(path)] + + +def is_binary(filename): + """Return true if filename may be a binary file, according to it's + extension. + + :type filename: str + :param filename: the name of the file + + :rtype: bool + :return: + true if the file is a binary file (actually if it's mime type + isn't beginning by text/) + """ + try: + return not mimetypes.guess_type(filename)[0].startswith('text') + except AttributeError: + return 1 + + +def write_open_mode(filename): + """Return the write mode that should used to open file. + + :type filename: str + :param filename: the name of the file + + :rtype: str + :return: the mode that should be use to open the file ('w' or 'wb') + """ + if is_binary(filename): + return 'wb' + return 'w' + + +def ensure_fs_mode(filepath, desired_mode=S_IWRITE): + """Check that the given file has the given mode(s) set, else try to + set it. + + :type filepath: str + :param filepath: path of the file + + :type desired_mode: int + :param desired_mode: + ORed flags describing the desired mode. 
Use constants from the + `stat` module for file permission's modes + """ + mode = stat(filepath)[ST_MODE] + if not mode & desired_mode: + chmod(filepath, mode | desired_mode) + + +# XXX (syt) unused? kill? +class ProtectedFile(FileIO): + """A special file-object class that automatically does a 'chmod +w' when + needed. + + XXX: for now, the way it is done allows 'normal file-objects' to be + created during the ProtectedFile object lifetime. + One way to circumvent this would be to chmod / unchmod on each + write operation. + + One other way would be to : + + - catch the IOError in the __init__ + + - if IOError, then create a StringIO object + + - each write operation writes in this StringIO object + + - on close()/del(), write/append the StringIO content to the file and + do the chmod only once + """ + def __init__(self, filepath, mode): + self.original_mode = stat(filepath)[ST_MODE] + self.mode_changed = False + if mode in ('w', 'a', 'wb', 'ab'): + if not self.original_mode & S_IWRITE: + chmod(filepath, self.original_mode | S_IWRITE) + self.mode_changed = True + FileIO.__init__(self, filepath, mode) + + def _restore_mode(self): + """restores the original mode if needed""" + if self.mode_changed: + chmod(self.name, self.original_mode) + # Don't re-chmod in case of several restore + self.mode_changed = False + + def close(self): + """restore mode before closing""" + self._restore_mode() + FileIO.close(self) + + def __del__(self): + if not self.closed: + self.close() + + +class UnresolvableError(Exception): + """Exception raised by relative path when it's unable to compute relative + path between two paths. + """ + +def relative_path(from_file, to_file): + """Try to get a relative path from `from_file` to `to_file` + (path will be absolute if to_file is an absolute file). This function + is useful to create link in `from_file` to `to_file`. This typical use + case is used in this function description. 
+ + If both files are relative, they're expected to be relative to the same + directory. + + >>> relative_path( from_file='toto/index.html', to_file='index.html') + '../index.html' + >>> relative_path( from_file='index.html', to_file='toto/index.html') + 'toto/index.html' + >>> relative_path( from_file='tutu/index.html', to_file='toto/index.html') + '../toto/index.html' + >>> relative_path( from_file='toto/index.html', to_file='/index.html') + '/index.html' + >>> relative_path( from_file='/toto/index.html', to_file='/index.html') + '../index.html' + >>> relative_path( from_file='/toto/index.html', to_file='/toto/summary.html') + 'summary.html' + >>> relative_path( from_file='index.html', to_file='index.html') + '' + >>> relative_path( from_file='/index.html', to_file='toto/index.html') + Traceback (most recent call last): + File "", line 1, in ? + File "", line 37, in relative_path + UnresolvableError + >>> relative_path( from_file='/index.html', to_file='/index.html') + '' + >>> + + :type from_file: str + :param from_file: source file (where links will be inserted) + + :type to_file: str + :param to_file: target file (on which links point) + + :raise UnresolvableError: if it has been unable to guess a correct path + + :rtype: str + :return: the relative path of `to_file` from `from_file` + """ + from_file = normpath(from_file) + to_file = normpath(to_file) + if from_file == to_file: + return '' + if isabs(to_file): + if not isabs(from_file): + return to_file + elif isabs(from_file): + raise UnresolvableError() + from_parts = from_file.split(sep) + to_parts = to_file.split(sep) + idem = 1 + result = [] + while len(from_parts) > 1: + dirname = from_parts.pop(0) + if idem and len(to_parts) > 1 and dirname == to_parts[0]: + to_parts.pop(0) + else: + idem = 0 + result.append('..') + result += to_parts + return sep.join(result) + + +def norm_read(path): + """Return the content of the file with normalized line feeds. 
+ + :type path: str + :param path: path to the file to read + + :rtype: str + :return: the content of the file with normalized line feeds + """ + return open(path, 'U').read() +norm_read = deprecated("use \"open(path, 'U').read()\"")(norm_read) + +def norm_open(path): + """Return a stream for a file with content with normalized line feeds. + + :type path: str + :param path: path to the file to open + + :rtype: file or StringIO + :return: the opened file with normalized line feeds + """ + return open(path, 'U') +norm_open = deprecated("use \"open(path, 'U')\"")(norm_open) + +def lines(path, comments=None): + """Return a list of non empty lines in the file located at `path`. + + :type path: str + :param path: path to the file + + :type comments: str or None + :param comments: + optional string which can be used to comment a line in the file + (i.e. lines starting with this string won't be returned) + + :rtype: list + :return: + a list of stripped line in the file, without empty and commented + lines + + :warning: at some point this function will probably return an iterator + """ + stream = open(path, 'U') + result = stream_lines(stream, comments) + stream.close() + return result + + +def stream_lines(stream, comments=None): + """Return a list of non empty lines in the given `stream`. + + :type stream: object implementing 'xreadlines' or 'readlines' + :param stream: file like object + + :type comments: str or None + :param comments: + optional string which can be used to comment a line in the file + (i.e. 
lines starting with this string won't be returned) + + :rtype: list + :return: + a list of stripped line in the file, without empty and commented + lines + + :warning: at some point this function will probably return an iterator + """ + try: + readlines = stream.xreadlines + except AttributeError: + readlines = stream.readlines + result = [] + for line in readlines(): + line = line.strip() + if line and (comments is None or not line.startswith(comments)): + result.append(line) + return result + + +def export(from_dir, to_dir, + blacklist=BASE_BLACKLIST, ignore_ext=IGNORED_EXTENSIONS, + verbose=0): + """Make a mirror of `from_dir` in `to_dir`, omitting directories and + files listed in the black list or ending with one of the given + extensions. + + :type from_dir: str + :param from_dir: directory to export + + :type to_dir: str + :param to_dir: destination directory + + :type blacklist: list or tuple + :param blacklist: + list of files or directories to ignore, default to the content of + `BASE_BLACKLIST` + + :type ignore_ext: list or tuple + :param ignore_ext: + list of extensions to ignore, default to the content of + `IGNORED_EXTENSIONS` + + :type verbose: bool + :param verbose: + flag indicating whether information about exported files should be + printed to stderr, default to False + """ + try: + mkdir(to_dir) + except OSError: + pass # FIXME we should use "exists" if the point is about existing dir + # else (permission problems?) shouldn't return / raise ? 
+ for directory, dirnames, filenames in walk(from_dir): + for norecurs in blacklist: + try: + dirnames.remove(norecurs) + except ValueError: + continue + for dirname in dirnames: + src = join(directory, dirname) + dest = to_dir + src[len(from_dir):] + if isdir(src): + if not exists(dest): + mkdir(dest) + for filename in filenames: + # don't include binary files + # endswith does not accept tuple in 2.4 + if any([filename.endswith(ext) for ext in ignore_ext]): + continue + src = join(directory, filename) + dest = to_dir + src[len(from_dir):] + if verbose: + print(src, '->', dest, file=sys.stderr) + if exists(dest): + remove(dest) + shutil.copy2(src, dest) + + +def remove_dead_links(directory, verbose=0): + """Recursively traverse directory and remove all dead links. + + :type directory: str + :param directory: directory to cleanup + + :type verbose: bool + :param verbose: + flag indicating whether information about deleted links should be + printed to stderr, default to False + """ + for dirpath, dirname, filenames in walk(directory): + for filename in dirnames + filenames: + src = join(dirpath, filename) + if islink(src) and not exists(src): + if verbose: + print('remove dead link', src) + remove(src) + diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py b/pymode/libs/logilab/common/graph.py similarity index 93% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py rename to pymode/libs/logilab/common/graph.py index d62e8c09..cef1c984 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/graph.py +++ b/pymode/libs/logilab/common/graph.py @@ -29,6 +29,7 @@ import sys import tempfile import codecs +import errno def escape(value): """Make usable in a dot file.""" @@ -63,7 +64,7 @@ def __init__(self, graphname, rankdir=None, size=None, ratio=None, assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \ 'unsupported charset %s' % charset self.emit('charset="%s"' % charset) - for param in 
additionnal_param.iteritems(): + for param in sorted(additionnal_param.items()): self.emit('='.join(param)) def get_source(self): @@ -114,13 +115,18 @@ def generate(self, outputfile=None, dotfile=None, mapfile=None): use_shell = True else: use_shell = False - if mapfile: - subprocess.call([self.renderer, '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile], - shell=use_shell) - else: - subprocess.call([self.renderer, '-T', target, - dot_sourcepath, '-o', outputfile], - shell=use_shell) + try: + if mapfile: + subprocess.call([self.renderer, '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile], + shell=use_shell) + else: + subprocess.call([self.renderer, '-T', target, + dot_sourcepath, '-o', outputfile], + shell=use_shell) + except OSError as e: + if e.errno == errno.ENOENT: + e.strerror = 'File not found: {0}'.format(self.renderer) + raise os.unlink(dot_sourcepath) return outputfile diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/interface.py b/pymode/libs/logilab/common/interface.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/interface.py rename to pymode/libs/logilab/common/interface.py diff --git a/pymode/libs/logilab/common/logging_ext.py b/pymode/libs/logilab/common/logging_ext.py new file mode 100644 index 00000000..3b6a580a --- /dev/null +++ b/pymode/libs/logilab/common/logging_ext.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. 
+# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Extends the logging module from the standard library.""" + +__docformat__ = "restructuredtext en" + +import os +import sys +import logging + +from six import string_types + +from logilab.common.textutils import colorize_ansi + + +def set_log_methods(cls, logger): + """bind standard logger's methods as methods on the class""" + cls.__logger = logger + for attr in ('debug', 'info', 'warning', 'error', 'critical', 'exception'): + setattr(cls, attr, getattr(logger, attr)) + + +def xxx_cyan(record): + if 'XXX' in record.message: + return 'cyan' + +class ColorFormatter(logging.Formatter): + """ + A color Formatter for the logging standard module. + + By default, colorize CRITICAL and ERROR in red, WARNING in orange, INFO in + green and DEBUG in yellow. + + self.colors is customizable via the 'color' constructor argument (dictionary). + + self.colorfilters is a list of functions that get the LogRecord + and return a color name or None. 
+ """ + + def __init__(self, fmt=None, datefmt=None, colors=None): + logging.Formatter.__init__(self, fmt, datefmt) + self.colorfilters = [] + self.colors = {'CRITICAL': 'red', + 'ERROR': 'red', + 'WARNING': 'magenta', + 'INFO': 'green', + 'DEBUG': 'yellow', + } + if colors is not None: + assert isinstance(colors, dict) + self.colors.update(colors) + + def format(self, record): + msg = logging.Formatter.format(self, record) + if record.levelname in self.colors: + color = self.colors[record.levelname] + return colorize_ansi(msg, color) + else: + for cf in self.colorfilters: + color = cf(record) + if color: + return colorize_ansi(msg, color) + return msg + +def set_color_formatter(logger=None, **kw): + """ + Install a color formatter on the 'logger'. If not given, it will + defaults to the default logger. + + Any additional keyword will be passed as-is to the ColorFormatter + constructor. + """ + if logger is None: + logger = logging.getLogger() + if not logger.handlers: + logging.basicConfig() + format_msg = logger.handlers[0].formatter._fmt + fmt = ColorFormatter(format_msg, **kw) + fmt.colorfilters.append(xxx_cyan) + logger.handlers[0].setFormatter(fmt) + + +LOG_FORMAT = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s' +LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S' + +def get_handler(debug=False, syslog=False, logfile=None, rotation_parameters=None): + """get an apropriate handler according to given parameters""" + if os.environ.get('APYCOT_ROOT'): + handler = logging.StreamHandler(sys.stdout) + if debug: + handler = logging.StreamHandler() + elif logfile is None: + if syslog: + from logging import handlers + handler = handlers.SysLogHandler() + else: + handler = logging.StreamHandler() + else: + try: + if rotation_parameters is None: + if os.name == 'posix' and sys.version_info >= (2, 6): + from logging.handlers import WatchedFileHandler + handler = WatchedFileHandler(logfile) + else: + handler = logging.FileHandler(logfile) + else: + from logging.handlers import 
TimedRotatingFileHandler + handler = TimedRotatingFileHandler( + logfile, **rotation_parameters) + except IOError: + handler = logging.StreamHandler() + return handler + +def get_threshold(debug=False, logthreshold=None): + if logthreshold is None: + if debug: + logthreshold = logging.DEBUG + else: + logthreshold = logging.ERROR + elif isinstance(logthreshold, string_types): + logthreshold = getattr(logging, THRESHOLD_MAP.get(logthreshold, + logthreshold)) + return logthreshold + +def _colorable_terminal(): + isatty = hasattr(sys.__stdout__, 'isatty') and sys.__stdout__.isatty() + if not isatty: + return False + if os.name == 'nt': + try: + from colorama import init as init_win32_colors + except ImportError: + return False + init_win32_colors() + return True + +def get_formatter(logformat=LOG_FORMAT, logdateformat=LOG_DATE_FORMAT): + if _colorable_terminal(): + fmt = ColorFormatter(logformat, logdateformat) + def col_fact(record): + if 'XXX' in record.message: + return 'cyan' + if 'kick' in record.message: + return 'red' + fmt.colorfilters.append(col_fact) + else: + fmt = logging.Formatter(logformat, logdateformat) + return fmt + +def init_log(debug=False, syslog=False, logthreshold=None, logfile=None, + logformat=LOG_FORMAT, logdateformat=LOG_DATE_FORMAT, fmt=None, + rotation_parameters=None, handler=None): + """init the log service""" + logger = logging.getLogger() + if handler is None: + handler = get_handler(debug, syslog, logfile, rotation_parameters) + # only addHandler and removeHandler method while I would like a setHandler + # method, so do it this way :$ + logger.handlers = [handler] + logthreshold = get_threshold(debug, logthreshold) + logger.setLevel(logthreshold) + if fmt is None: + if debug: + fmt = get_formatter(logformat=logformat, logdateformat=logdateformat) + else: + fmt = logging.Formatter(logformat, logdateformat) + handler.setFormatter(fmt) + return handler + +# map logilab.common.logger thresholds to logging thresholds +THRESHOLD_MAP = 
{'LOG_DEBUG': 'DEBUG', + 'LOG_INFO': 'INFO', + 'LOG_NOTICE': 'INFO', + 'LOG_WARN': 'WARNING', + 'LOG_WARNING': 'WARNING', + 'LOG_ERR': 'ERROR', + 'LOG_ERROR': 'ERROR', + 'LOG_CRIT': 'CRITICAL', + } diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py b/pymode/libs/logilab/common/modutils.py similarity index 94% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py rename to pymode/libs/logilab/common/modutils.py index 27568412..dd725d24 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/modutils.py +++ b/pymode/libs/logilab/common/modutils.py @@ -25,9 +25,8 @@ :var STD_LIB_DIR: directory where standard modules are located :type BUILTIN_MODULES: dict -:var BUILTIN_MODULES: dictionary with builtin module names has key +:var BUILTIN_MODULES: dictionary with builtin module names as key """ -from __future__ import with_statement __docformat__ = "restructuredtext en" @@ -38,6 +37,8 @@ from distutils.sysconfig import get_config_var, get_python_lib, get_python_version from distutils.errors import DistutilsPlatformError +from six.moves import range + try: import zipimport except ImportError: @@ -61,7 +62,7 @@ PY_COMPILED_EXTS = ('so',) try: - STD_LIB_DIR = get_python_lib(standard_lib=1) + STD_LIB_DIR = get_python_lib(standard_lib=True) # get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to # non-valid path, see https://bugs.pypy.org/issue1164 except DistutilsPlatformError: @@ -69,8 +70,7 @@ EXT_LIB_DIR = get_python_lib() -BUILTIN_MODULES = dict(zip(sys.builtin_module_names, - [1]*len(sys.builtin_module_names))) +BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True) class NoSourceFile(Exception): @@ -93,14 +93,14 @@ def _getobj(self): def __getattribute__(self, attr): try: return super(LazyObject, self).__getattribute__(attr) - except AttributeError, ex: + except AttributeError as ex: return getattr(self._getobj(), attr) def __call__(self, *args, **kwargs): return 
self._getobj()(*args, **kwargs) -def load_module_from_name(dotted_name, path=None, use_sys=1): +def load_module_from_name(dotted_name, path=None, use_sys=True): """Load a Python module from its name. :type dotted_name: str @@ -125,7 +125,7 @@ def load_module_from_name(dotted_name, path=None, use_sys=1): return load_module_from_modpath(dotted_name.split('.'), path, use_sys) -def load_module_from_modpath(parts, path=None, use_sys=1): +def load_module_from_modpath(parts, path=None, use_sys=True): """Load a python module from its splitted name. :type parts: list(str) or tuple(str) @@ -169,14 +169,16 @@ def load_module_from_modpath(parts, path=None, use_sys=1): if prevmodule: setattr(prevmodule, part, module) _file = getattr(module, '__file__', '') + prevmodule = module + if not _file and _is_namespace(curname): + continue if not _file and len(modpath) != len(parts): raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]) ) path = [dirname( _file )] - prevmodule = module return module -def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None): +def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None): """Load a Python module from it's path. 
:type filepath: str @@ -204,9 +206,11 @@ def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None): def _check_init(path, mod_path): """check there are some __init__.py all along the way""" + modpath = [] for part in mod_path: + modpath.append(part) path = join(path, part) - if not _has_init(path): + if not _is_namespace('.'.join(modpath)) and not _has_init(path): return False return True @@ -455,13 +459,16 @@ def get_source_file(filename, include_no_ext=False): def cleanup_sys_modules(directories): """remove submodules of `directories` from `sys.modules`""" - for modname, module in sys.modules.items(): + cleaned = [] + for modname, module in list(sys.modules.items()): modfile = getattr(module, '__file__', None) if modfile: for directory in directories: if modfile.startswith(directory): + cleaned.append(modname) del sys.modules[modname] break + return cleaned def is_python_source(filename): @@ -472,7 +479,6 @@ def is_python_source(filename): return splitext(filename)[1][1:] in PY_SOURCE_EXTS - def is_standard_module(modname, std_path=(STD_LIB_DIR,)): """try to guess if a module is a standard python module (by default, see `std_path` parameter's description) @@ -481,7 +487,7 @@ def is_standard_module(modname, std_path=(STD_LIB_DIR,)): :param modname: name of the module we are interested in :type std_path: list(str) or tuple(str) - :param std_path: list of path considered has standard + :param std_path: list of path considered as standard :rtype: bool @@ -489,24 +495,28 @@ def is_standard_module(modname, std_path=(STD_LIB_DIR,)): true if the module: - is located on the path listed in one of the directory in `std_path` - is a built-in module + + Note: this function is known to return wrong values when inside virtualenv. + See https://www.logilab.org/ticket/294756. 
""" modname = modname.split('.')[0] try: filename = file_from_modpath([modname]) - except ImportError, ex: + except ImportError as ex: # import failed, i'm probably not so wrong by supposing it's # not standard... - return 0 + return False # modules which are not living in a file are considered standard # (sys and __builtin__ for instance) if filename is None: - return 1 + # we assume there are no namespaces in stdlib + return not _is_namespace(modname) filename = abspath(filename) if filename.startswith(EXT_LIB_DIR): - return 0 + return False for path in std_path: if filename.startswith(abspath(path)): - return 1 + return True return False @@ -581,6 +591,12 @@ def _search_zip(modpath, pic): except ImportError: pkg_resources = None + +def _is_namespace(modname): + return (pkg_resources is not None + and modname in pkg_resources._namespace_packages) + + def _module_file(modpath, path=None): """get a module type / file path @@ -612,11 +628,13 @@ def _module_file(modpath, path=None): except AttributeError: checkeggs = False # pkg_resources support (aka setuptools namespace packages) - if pkg_resources is not None and modpath[0] in pkg_resources._namespace_packages and len(modpath) > 1: + if (_is_namespace(modpath[0]) and modpath[0] in sys.modules): # setuptools has added into sys.modules a module object with proper # __path__, get back information from there module = sys.modules[modpath.pop(0)] path = module.__path__ + if not modpath: + return C_BUILTIN, None imported = [] while modpath: modname = modpath[0] diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/optik_ext.py b/pymode/libs/logilab/common/optik_ext.py similarity index 96% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/optik_ext.py rename to pymode/libs/logilab/common/optik_ext.py index 49d685b1..1fd2a7f8 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/optik_ext.py +++ b/pymode/libs/logilab/common/optik_ext.py @@ -46,6 +46,8 @@ argument of this type will be 
converted to a float value in bytes according to byte units (b, kb, mb, gb, tb) """ +from __future__ import print_function + __docformat__ = "restructuredtext en" import re @@ -65,7 +67,9 @@ except ImportError: HAS_MX_DATETIME = False -from logilab.common.textutils import splitstrip +from logilab.common.textutils import splitstrip, TIME_UNITS, BYTE_UNITS, \ + apply_units + def check_regexp(option, opt, value): """check a regexp value by trying to compile it @@ -165,18 +169,15 @@ def check_color(option, opt, value): raise OptionValueError(msg % (opt, value)) def check_time(option, opt, value): - from logilab.common.textutils import TIME_UNITS, apply_units if isinstance(value, (int, long, float)): return value return apply_units(value, TIME_UNITS) def check_bytes(option, opt, value): - from logilab.common.textutils import BYTE_UNITS, apply_units if hasattr(value, '__int__'): return value return apply_units(value, BYTE_UNITS) -import types class Option(BaseOption): """override optik.Option to add some new option types @@ -211,7 +212,7 @@ def _check_choice(self): if self.choices is None: raise OptionError( "must supply a list of choices for type 'choice'", self) - elif type(self.choices) not in (types.TupleType, types.ListType): + elif not isinstance(self.choices, (tuple, list)): raise OptionError( "choices must be a list of strings ('%s' supplied)" % str(type(self.choices)).split("'")[1], self) @@ -382,9 +383,9 @@ def generate_manpage(optparser, pkginfo, section=1, stream=sys.stdout, level=0): formatter = ManHelpFormatter() formatter.output_level = level formatter.parser = optparser - print >> stream, formatter.format_head(optparser, pkginfo, section) - print >> stream, optparser.format_option_help(formatter) - print >> stream, formatter.format_tail(pkginfo) + print(formatter.format_head(optparser, pkginfo, section), file=stream) + print(optparser.format_option_help(formatter), file=stream) + print(formatter.format_tail(pkginfo), file=stream) __all__ = 
('OptionParser', 'Option', 'OptionGroup', 'OptionValueError', diff --git a/pymode/libs/logilab/common/optparser.py b/pymode/libs/logilab/common/optparser.py new file mode 100644 index 00000000..aa17750e --- /dev/null +++ b/pymode/libs/logilab/common/optparser.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Extend OptionParser with commands. + +Example: + +>>> parser = OptionParser() +>>> parser.usage = '%prog COMMAND [options] ...' 
+>>> parser.add_command('build', 'mymod.build') +>>> parser.add_command('clean', run_clean, add_opt_clean) +>>> run, options, args = parser.parse_command(sys.argv[1:]) +>>> return run(options, args[1:]) + +With mymod.build that defines two functions run and add_options +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from warnings import warn +warn('lgc.optparser module is deprecated, use lgc.clcommands instead', DeprecationWarning, + stacklevel=2) + +import sys +import optparse + +class OptionParser(optparse.OptionParser): + + def __init__(self, *args, **kwargs): + optparse.OptionParser.__init__(self, *args, **kwargs) + self._commands = {} + self.min_args, self.max_args = 0, 1 + + def add_command(self, name, mod_or_funcs, help=''): + """name of the command, name of module or tuple of functions + (run, add_options) + """ + assert isinstance(mod_or_funcs, str) or isinstance(mod_or_funcs, tuple), \ + "mod_or_funcs has to be a module name or a tuple of functions" + self._commands[name] = (mod_or_funcs, help) + + def print_main_help(self): + optparse.OptionParser.print_help(self) + print('\ncommands:') + for cmdname, (_, help) in self._commands.items(): + print('% 10s - %s' % (cmdname, help)) + + def parse_command(self, args): + if len(args) == 0: + self.print_main_help() + sys.exit(1) + cmd = args[0] + args = args[1:] + if cmd not in self._commands: + if cmd in ('-h', '--help'): + self.print_main_help() + sys.exit(0) + elif self.version is not None and cmd == "--version": + self.print_version() + sys.exit(0) + self.error('unknown command') + self.prog = '%s %s' % (self.prog, cmd) + mod_or_f, help = self._commands[cmd] + # optparse inserts self.description between usage and options help + self.description = help + if isinstance(mod_or_f, str): + exec('from %s import run, add_options' % mod_or_f) + else: + run, add_options = mod_or_f + add_options(self) + (options, args) = self.parse_args(args) + if not (self.min_args <= len(args) 
<= self.max_args): + self.error('incorrect number of arguments') + return run, options, args + + diff --git a/pymode/libs/logilab/common/proc.py b/pymode/libs/logilab/common/proc.py new file mode 100644 index 00000000..c27356c6 --- /dev/null +++ b/pymode/libs/logilab/common/proc.py @@ -0,0 +1,277 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . 
+"""module providing: +* process information (linux specific: rely on /proc) +* a class for resource control (memory / time / cpu time) + +This module doesn't work on windows platforms (only tested on linux) + +:organization: Logilab + + + +""" +__docformat__ = "restructuredtext en" + +import os +import stat +from resource import getrlimit, setrlimit, RLIMIT_CPU, RLIMIT_AS +from signal import signal, SIGXCPU, SIGKILL, SIGUSR2, SIGUSR1 +from threading import Timer, currentThread, Thread, Event +from time import time + +from logilab.common.tree import Node + +class NoSuchProcess(Exception): pass + +def proc_exists(pid): + """check the a pid is registered in /proc + raise NoSuchProcess exception if not + """ + if not os.path.exists('/proc/%s' % pid): + raise NoSuchProcess() + +PPID = 3 +UTIME = 13 +STIME = 14 +CUTIME = 15 +CSTIME = 16 +VSIZE = 22 + +class ProcInfo(Node): + """provide access to process information found in /proc""" + + def __init__(self, pid): + self.pid = int(pid) + Node.__init__(self, self.pid) + proc_exists(self.pid) + self.file = '/proc/%s/stat' % self.pid + self.ppid = int(self.status()[PPID]) + + def memory_usage(self): + """return the memory usage of the process in Ko""" + try : + return int(self.status()[VSIZE]) + except IOError: + return 0 + + def lineage_memory_usage(self): + return self.memory_usage() + sum([child.lineage_memory_usage() + for child in self.children]) + + def time(self, children=0): + """return the number of jiffies that this process has been scheduled + in user and kernel mode""" + status = self.status() + time = int(status[UTIME]) + int(status[STIME]) + if children: + time += int(status[CUTIME]) + int(status[CSTIME]) + return time + + def status(self): + """return the list of fields found in /proc//stat""" + return open(self.file).read().split() + + def name(self): + """return the process name found in /proc//stat + """ + return self.status()[1].strip('()') + + def age(self): + """return the age of the process + """ + 
+    """Error raised when the process is running for too much time"""
+        """stop the sentinel thread"""
self._timer.start() + if self.max_cpu_time is not None: + self._old_max_cpu_time = getrlimit(RLIMIT_CPU) + cpu_limit = (int(self.max_cpu_time), self._old_max_cpu_time[1]) + self._old_sigxcpu_hdlr = signal(SIGXCPU, self._handle_sigxcpu) + setrlimit(RLIMIT_CPU, cpu_limit) + if self.max_memory is not None: + self._msentinel = MemorySentinel(1, int(self.max_memory) ) + self._old_max_memory = getrlimit(RLIMIT_AS) + self._old_usr1_hdlr = signal(SIGUSR1, self._hangle_sig_memory) + as_limit = (int(self.max_memory), self._old_max_memory[1]) + setrlimit(RLIMIT_AS, as_limit) + self._msentinel.start() + self._limit_set += 1 + + def clean_limit(self): + """reinstall the old process limit""" + if self._limit_set > 0: + if self.max_time is not None: + self._timer.cancel() + self._elapse_time += int(time())-self._start_time + self._timer = None + signal(SIGUSR2, self._old_usr2_hdlr) + if self.max_cpu_time is not None: + setrlimit(RLIMIT_CPU, self._old_max_cpu_time) + signal(SIGXCPU, self._old_sigxcpu_hdlr) + if self.max_memory is not None: + self._msentinel.stop() + self._msentinel = None + setrlimit(RLIMIT_AS, self._old_max_memory) + signal(SIGUSR1, self._old_usr1_hdlr) + self._limit_set -= 1 diff --git a/pymode/libs/logilab/common/pytest.py b/pymode/libs/logilab/common/pytest.py new file mode 100644 index 00000000..3d8aca34 --- /dev/null +++ b/pymode/libs/logilab/common/pytest.py @@ -0,0 +1,1202 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. 
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+You can also tag your tests for fine filtering
+""" + +ENABLE_DBC = False +FILE_RESTART = ".pytest.restart" + +import os, sys, re +import os.path as osp +from time import time, clock +import warnings +import types +from inspect import isgeneratorfunction, isclass +from contextlib import contextmanager +from random import shuffle + +from logilab.common.fileutils import abspath_listdir +from logilab.common import textutils +from logilab.common import testlib, STD_BLACKLIST +# use the same unittest module as testlib +from logilab.common.testlib import unittest, start_interactive_mode +from logilab.common.deprecation import deprecated +import doctest + +import unittest as unittest_legacy +if not getattr(unittest_legacy, "__package__", None): + try: + import unittest2.suite as unittest_suite + except ImportError: + sys.exit("You have to install python-unittest2 to use this module") +else: + import unittest.suite as unittest_suite + +try: + import django + from logilab.common.modutils import modpath_from_file, load_module_from_modpath + DJANGO_FOUND = True +except ImportError: + DJANGO_FOUND = False + +CONF_FILE = 'pytestconf.py' + +## coverage pausing tools + +@contextmanager +def replace_trace(trace=None): + """A context manager that temporary replaces the trace function""" + oldtrace = sys.gettrace() + sys.settrace(trace) + try: + yield + finally: + # specific hack to work around a bug in pycoverage, see + # https://bitbucket.org/ned/coveragepy/issue/123 + if (oldtrace is not None and not callable(oldtrace) and + hasattr(oldtrace, 'pytrace')): + oldtrace = oldtrace.pytrace + sys.settrace(oldtrace) + + +def pause_trace(): + """A context manager that temporary pauses any tracing""" + return replace_trace() + +class TraceController(object): + ctx_stack = [] + + @classmethod + @deprecated('[lgc 0.63.1] Use the pause_trace() context manager') + def pause_tracing(cls): + cls.ctx_stack.append(pause_trace()) + cls.ctx_stack[-1].__enter__() + + @classmethod + @deprecated('[lgc 0.63.1] Use the pause_trace() context 
manager') + def resume_tracing(cls): + cls.ctx_stack.pop().__exit__(None, None, None) + + +pause_tracing = TraceController.pause_tracing +resume_tracing = TraceController.resume_tracing + + +def nocoverage(func): + """Function decorator that pauses tracing functions""" + if hasattr(func, 'uncovered'): + return func + func.uncovered = True + + def not_covered(*args, **kwargs): + with pause_trace(): + return func(*args, **kwargs) + not_covered.uncovered = True + return not_covered + +## end of coverage pausing tools + + +TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$") +def this_is_a_testfile(filename): + """returns True if `filename` seems to be a test file""" + return TESTFILE_RE.match(osp.basename(filename)) + +TESTDIR_RE = re.compile("^(unit)?tests?$") +def this_is_a_testdir(dirpath): + """returns True if `filename` seems to be a test directory""" + return TESTDIR_RE.match(osp.basename(dirpath)) + + +def load_pytest_conf(path, parser): + """loads a ``pytestconf.py`` file and update default parser + and / or tester. 
+ """ + namespace = {} + exec(open(path, 'rb').read(), namespace) + if 'update_parser' in namespace: + namespace['update_parser'](parser) + return namespace.get('CustomPyTester', PyTester) + + +def project_root(parser, projdir=os.getcwd()): + """try to find project's root and add it to sys.path""" + previousdir = curdir = osp.abspath(projdir) + testercls = PyTester + conf_file_path = osp.join(curdir, CONF_FILE) + if osp.isfile(conf_file_path): + testercls = load_pytest_conf(conf_file_path, parser) + while this_is_a_testdir(curdir) or \ + osp.isfile(osp.join(curdir, '__init__.py')): + newdir = osp.normpath(osp.join(curdir, os.pardir)) + if newdir == curdir: + break + previousdir = curdir + curdir = newdir + conf_file_path = osp.join(curdir, CONF_FILE) + if osp.isfile(conf_file_path): + testercls = load_pytest_conf(conf_file_path, parser) + return previousdir, testercls + + +class GlobalTestReport(object): + """this class holds global test statistics""" + def __init__(self): + self.ran = 0 + self.skipped = 0 + self.failures = 0 + self.errors = 0 + self.ttime = 0 + self.ctime = 0 + self.modulescount = 0 + self.errmodules = [] + + def feed(self, filename, testresult, ttime, ctime): + """integrates new test information into internal statistics""" + ran = testresult.testsRun + self.ran += ran + self.skipped += len(getattr(testresult, 'skipped', ())) + self.failures += len(testresult.failures) + self.errors += len(testresult.errors) + self.ttime += ttime + self.ctime += ctime + self.modulescount += 1 + if not testresult.wasSuccessful(): + problems = len(testresult.failures) + len(testresult.errors) + self.errmodules.append((filename[:-3], problems, ran)) + + def failed_to_test_module(self, filename): + """called when the test module could not be imported by unittest + """ + self.errors += 1 + self.modulescount += 1 + self.ran += 1 + self.errmodules.append((filename[:-3], 1, 1)) + + def skip_module(self, filename): + self.modulescount += 1 + self.ran += 1 + 
self.errmodules.append((filename[:-3], 0, 0)) + + def __str__(self): + """this is just presentation stuff""" + line1 = ['Ran %s test cases in %.2fs (%.2fs CPU)' + % (self.ran, self.ttime, self.ctime)] + if self.errors: + line1.append('%s errors' % self.errors) + if self.failures: + line1.append('%s failures' % self.failures) + if self.skipped: + line1.append('%s skipped' % self.skipped) + modulesok = self.modulescount - len(self.errmodules) + if self.errors or self.failures: + line2 = '%s modules OK (%s failed)' % (modulesok, + len(self.errmodules)) + descr = ', '.join(['%s [%s/%s]' % info for info in self.errmodules]) + line3 = '\nfailures: %s' % descr + elif modulesok: + line2 = 'All %s modules OK' % modulesok + line3 = '' + else: + return '' + return '%s\n%s%s' % (', '.join(line1), line2, line3) + + + +def remove_local_modules_from_sys(testdir): + """remove all modules from cache that come from `testdir` + + This is used to avoid strange side-effects when using the + testall() mode of pytest. 
+    we **have** to clean sys.modules to make sure the correct test_utils
+    module is run in B
+ if not self.testonedir(dirname, exitfirst): + break + dirs[:] = [] + if self.report.ran == 0: + print("no test dir found testing here:", here) + # if no test was found during the visit, consider + # the local directory as a test directory even if + # it doesn't have a traditional test directory name + self.testonedir(here) + + def testonedir(self, testdir, exitfirst=False): + """finds each testfile in the `testdir` and runs it + + return true when all tests has been executed, false if exitfirst and + some test has failed. + """ + files = abspath_listdir(testdir) + shuffle(files) + for filename in files: + if this_is_a_testfile(filename): + if self.options.exitfirst and not self.options.restart: + # overwrite restart file + try: + restartfile = open(FILE_RESTART, "w") + restartfile.close() + except Exception: + print("Error while overwriting succeeded test file :", + osp.join(os.getcwd(), FILE_RESTART), + file=sys.__stderr__) + raise + # run test and collect information + prog = self.testfile(filename, batchmode=True) + if exitfirst and (prog is None or not prog.result.wasSuccessful()): + return False + self.firstwrite = True + # clean local modules + remove_local_modules_from_sys(testdir) + return True + + def testfile(self, filename, batchmode=False): + """runs every test in `filename` + + :param filename: an absolute path pointing to a unittest file + """ + here = os.getcwd() + dirname = osp.dirname(filename) + if dirname: + os.chdir(dirname) + # overwrite restart file if it has not been done already + if self.options.exitfirst and not self.options.restart and self.firstwrite: + try: + restartfile = open(FILE_RESTART, "w") + restartfile.close() + except Exception: + print("Error while overwriting succeeded test file :", + osp.join(os.getcwd(), FILE_RESTART), file=sys.__stderr__) + raise + modname = osp.basename(filename)[:-3] + print((' %s ' % osp.basename(filename)).center(70, '='), + file=sys.__stderr__) + try: + tstart, cstart = time(), clock() + try: + 
testprog = SkipAwareTestProgram(modname, batchmode=batchmode, cvg=self.cvg, + options=self.options, outstream=sys.stderr) + except KeyboardInterrupt: + raise + except SystemExit as exc: + self.errcode = exc.code + raise + except testlib.SkipTest: + print("Module skipped:", filename) + self.report.skip_module(filename) + return None + except Exception: + self.report.failed_to_test_module(filename) + print('unhandled exception occurred while testing', modname, + file=sys.stderr) + import traceback + traceback.print_exc(file=sys.stderr) + return None + + tend, cend = time(), clock() + ttime, ctime = (tend - tstart), (cend - cstart) + self.report.feed(filename, testprog.result, ttime, ctime) + return testprog + finally: + if dirname: + os.chdir(here) + + + +class DjangoTester(PyTester): + + def load_django_settings(self, dirname): + """try to find project's setting and load it""" + curdir = osp.abspath(dirname) + previousdir = curdir + while not osp.isfile(osp.join(curdir, 'settings.py')) and \ + osp.isfile(osp.join(curdir, '__init__.py')): + newdir = osp.normpath(osp.join(curdir, os.pardir)) + if newdir == curdir: + raise AssertionError('could not find settings.py') + previousdir = curdir + curdir = newdir + # late django initialization + settings = load_module_from_modpath(modpath_from_file(osp.join(curdir, 'settings.py'))) + from django.core.management import setup_environ + setup_environ(settings) + settings.DEBUG = False + self.settings = settings + # add settings dir to pythonpath since it's the project's root + if curdir not in sys.path: + sys.path.insert(1, curdir) + + def before_testfile(self): + # Those imports must be done **after** setup_environ was called + from django.test.utils import setup_test_environment + from django.test.utils import create_test_db + setup_test_environment() + create_test_db(verbosity=0) + self.dbname = self.settings.TEST_DATABASE_NAME + + def after_testfile(self): + # Those imports must be done **after** setup_environ was called + 
from django.test.utils import teardown_test_environment + from django.test.utils import destroy_test_db + teardown_test_environment() + print('destroying', self.dbname) + destroy_test_db(self.dbname, verbosity=0) + + def testall(self, exitfirst=False): + """walks through current working directory, finds something + which can be considered as a testdir and runs every test there + """ + for dirname, dirs, files in os.walk(os.getcwd()): + for skipped in ('CVS', '.svn', '.hg'): + if skipped in dirs: + dirs.remove(skipped) + if 'tests.py' in files: + if not self.testonedir(dirname, exitfirst): + break + dirs[:] = [] + else: + basename = osp.basename(dirname) + if basename in ('test', 'tests'): + print("going into", dirname) + # we found a testdir, let's explore it ! + if not self.testonedir(dirname, exitfirst): + break + dirs[:] = [] + + def testonedir(self, testdir, exitfirst=False): + """finds each testfile in the `testdir` and runs it + + return true when all tests has been executed, false if exitfirst and + some test has failed. 
+ """ + # special django behaviour : if tests are splitted in several files, + # remove the main tests.py file and tests each test file separately + testfiles = [fpath for fpath in abspath_listdir(testdir) + if this_is_a_testfile(fpath)] + if len(testfiles) > 1: + try: + testfiles.remove(osp.join(testdir, 'tests.py')) + except ValueError: + pass + for filename in testfiles: + # run test and collect information + prog = self.testfile(filename, batchmode=True) + if exitfirst and (prog is None or not prog.result.wasSuccessful()): + return False + # clean local modules + remove_local_modules_from_sys(testdir) + return True + + def testfile(self, filename, batchmode=False): + """runs every test in `filename` + + :param filename: an absolute path pointing to a unittest file + """ + here = os.getcwd() + dirname = osp.dirname(filename) + if dirname: + os.chdir(dirname) + self.load_django_settings(dirname) + modname = osp.basename(filename)[:-3] + print((' %s ' % osp.basename(filename)).center(70, '='), + file=sys.stderr) + try: + try: + tstart, cstart = time(), clock() + self.before_testfile() + testprog = SkipAwareTestProgram(modname, batchmode=batchmode, cvg=self.cvg) + tend, cend = time(), clock() + ttime, ctime = (tend - tstart), (cend - cstart) + self.report.feed(filename, testprog.result, ttime, ctime) + return testprog + except SystemExit: + raise + except Exception as exc: + import traceback + traceback.print_exc() + self.report.failed_to_test_module(filename) + print('unhandled exception occurred while testing', modname) + print('error: %s' % exc) + return None + finally: + self.after_testfile() + if dirname: + os.chdir(here) + + +def make_parser(): + """creates the OptionParser instance + """ + from optparse import OptionParser + parser = OptionParser(usage=PYTEST_DOC) + + parser.newargs = [] + def rebuild_cmdline(option, opt, value, parser): + """carry the option to unittest_main""" + parser.newargs.append(opt) + + def rebuild_and_store(option, opt, value, 
parser): + """carry the option to unittest_main and store + the value on current parser + """ + parser.newargs.append(opt) + setattr(parser.values, option.dest, True) + + def capture_and_rebuild(option, opt, value, parser): + warnings.simplefilter('ignore', DeprecationWarning) + rebuild_cmdline(option, opt, value, parser) + + # pytest options + parser.add_option('-t', dest='testdir', default=None, + help="directory where the tests will be found") + parser.add_option('-d', dest='dbc', default=False, + action="store_true", help="enable design-by-contract") + # unittest_main options provided and passed through pytest + parser.add_option('-v', '--verbose', callback=rebuild_cmdline, + action="callback", help="Verbose output") + parser.add_option('-i', '--pdb', callback=rebuild_and_store, + dest="pdb", action="callback", + help="Enable test failure inspection") + parser.add_option('-x', '--exitfirst', callback=rebuild_and_store, + dest="exitfirst", default=False, + action="callback", help="Exit on first failure " + "(only make sense when pytest run one test file)") + parser.add_option('-R', '--restart', callback=rebuild_and_store, + dest="restart", default=False, + action="callback", + help="Restart tests from where it failed (implies exitfirst) " + "(only make sense if tests previously ran with exitfirst only)") + parser.add_option('--color', callback=rebuild_cmdline, + action="callback", + help="colorize tracebacks") + parser.add_option('-s', '--skip', + # XXX: I wish I could use the callback action but it + # doesn't seem to be able to get the value + # associated to the option + action="store", dest="skipped", default=None, + help="test names matching this name will be skipped " + "to skip several patterns, use commas") + parser.add_option('-q', '--quiet', callback=rebuild_cmdline, + action="callback", help="Minimal output") + parser.add_option('-P', '--profile', default=None, dest='profile', + help="Profile execution and store data in the given file") + 
parser.add_option('-m', '--match', default=None, dest='tags_pattern', + help="only execute test whose tag match the current pattern") + + if DJANGO_FOUND: + parser.add_option('-J', '--django', dest='django', default=False, + action="store_true", + help='use pytest for django test cases') + return parser + + +def parseargs(parser): + """Parse the command line and return (options processed), (options to pass to + unittest_main()), (explicitfile or None). + """ + # parse the command line + options, args = parser.parse_args() + filenames = [arg for arg in args if arg.endswith('.py')] + if filenames: + if len(filenames) > 1: + parser.error("only one filename is acceptable") + explicitfile = filenames[0] + args.remove(explicitfile) + else: + explicitfile = None + # someone wants DBC + testlib.ENABLE_DBC = options.dbc + newargs = parser.newargs + if options.skipped: + newargs.extend(['--skip', options.skipped]) + # restart implies exitfirst + if options.restart: + options.exitfirst = True + # append additional args to the new sys.argv and let unittest_main + # do the rest + newargs += args + return options, explicitfile + + + +def run(): + parser = make_parser() + rootdir, testercls = project_root(parser) + options, explicitfile = parseargs(parser) + # mock a new command line + sys.argv[1:] = parser.newargs + cvg = None + if not '' in sys.path: + sys.path.insert(0, '') + if DJANGO_FOUND and options.django: + tester = DjangoTester(cvg, options) + else: + tester = testercls(cvg, options) + if explicitfile: + cmd, args = tester.testfile, (explicitfile,) + elif options.testdir: + cmd, args = tester.testonedir, (options.testdir, options.exitfirst) + else: + cmd, args = tester.testall, (options.exitfirst,) + try: + try: + if options.profile: + import hotshot + prof = hotshot.Profile(options.profile) + prof.runcall(cmd, *args) + prof.close() + print('profile data saved in', options.profile) + else: + cmd(*args) + except SystemExit: + raise + except: + import traceback + 
traceback.print_exc() + finally: + tester.show_report() + sys.exit(tester.errcode) + +class SkipAwareTestProgram(unittest.TestProgram): + # XXX: don't try to stay close to unittest.py, use optparse + USAGE = """\ +Usage: %(progName)s [options] [test] [...] + +Options: + -h, --help Show this message + -v, --verbose Verbose output + -i, --pdb Enable test failure inspection + -x, --exitfirst Exit on first failure + -s, --skip skip test matching this pattern (no regexp for now) + -q, --quiet Minimal output + --color colorize tracebacks + + -m, --match Run only test whose tag match this pattern + + -P, --profile FILE: Run the tests using cProfile and saving results + in FILE + +Examples: + %(progName)s - run default set of tests + %(progName)s MyTestSuite - run suite 'MyTestSuite' + %(progName)s MyTestCase.testSomething - run MyTestCase.testSomething + %(progName)s MyTestCase - run all 'test*' test methods + in MyTestCase +""" + def __init__(self, module='__main__', defaultTest=None, batchmode=False, + cvg=None, options=None, outstream=sys.stderr): + self.batchmode = batchmode + self.cvg = cvg + self.options = options + self.outstream = outstream + super(SkipAwareTestProgram, self).__init__( + module=module, defaultTest=defaultTest, + testLoader=NonStrictTestLoader()) + + def parseArgs(self, argv): + self.pdbmode = False + self.exitfirst = False + self.skipped_patterns = [] + self.test_pattern = None + self.tags_pattern = None + self.colorize = False + self.profile_name = None + import getopt + try: + options, args = getopt.getopt(argv[1:], 'hHvixrqcp:s:m:P:', + ['help', 'verbose', 'quiet', 'pdb', + 'exitfirst', 'restart', + 'skip=', 'color', 'match=', 'profile=']) + for opt, value in options: + if opt in ('-h', '-H', '--help'): + self.usageExit() + if opt in ('-i', '--pdb'): + self.pdbmode = True + if opt in ('-x', '--exitfirst'): + self.exitfirst = True + if opt in ('-r', '--restart'): + self.restart = True + self.exitfirst = True + if opt in ('-q', '--quiet'): + 
self.verbosity = 0 + if opt in ('-v', '--verbose'): + self.verbosity = 2 + if opt in ('-s', '--skip'): + self.skipped_patterns = [pat.strip() for pat in + value.split(', ')] + if opt == '--color': + self.colorize = True + if opt in ('-m', '--match'): + #self.tags_pattern = value + self.options["tag_pattern"] = value + if opt in ('-P', '--profile'): + self.profile_name = value + self.testLoader.skipped_patterns = self.skipped_patterns + if len(args) == 0 and self.defaultTest is None: + suitefunc = getattr(self.module, 'suite', None) + if isinstance(suitefunc, (types.FunctionType, + types.MethodType)): + self.test = self.module.suite() + else: + self.test = self.testLoader.loadTestsFromModule(self.module) + return + if len(args) > 0: + self.test_pattern = args[0] + self.testNames = args + else: + self.testNames = (self.defaultTest, ) + self.createTests() + except getopt.error as msg: + self.usageExit(msg) + + def runTests(self): + if self.profile_name: + import cProfile + cProfile.runctx('self._runTests()', globals(), locals(), self.profile_name ) + else: + return self._runTests() + + def _runTests(self): + self.testRunner = SkipAwareTextTestRunner(verbosity=self.verbosity, + stream=self.outstream, + exitfirst=self.exitfirst, + pdbmode=self.pdbmode, + cvg=self.cvg, + test_pattern=self.test_pattern, + skipped_patterns=self.skipped_patterns, + colorize=self.colorize, + batchmode=self.batchmode, + options=self.options) + + def removeSucceededTests(obj, succTests): + """ Recursive function that removes succTests from + a TestSuite or TestCase + """ + if isinstance(obj, unittest.TestSuite): + removeSucceededTests(obj._tests, succTests) + if isinstance(obj, list): + for el in obj[:]: + if isinstance(el, unittest.TestSuite): + removeSucceededTests(el, succTests) + elif isinstance(el, unittest.TestCase): + descr = '.'.join((el.__class__.__module__, + el.__class__.__name__, + el._testMethodName)) + if descr in succTests: + obj.remove(el) + # take care, self.options may be 
None + if getattr(self.options, 'restart', False): + # retrieve succeeded tests from FILE_RESTART + try: + restartfile = open(FILE_RESTART, 'r') + try: + succeededtests = list(elem.rstrip('\n\r') for elem in + restartfile.readlines()) + removeSucceededTests(self.test, succeededtests) + finally: + restartfile.close() + except Exception as ex: + raise Exception("Error while reading succeeded tests into %s: %s" + % (osp.join(os.getcwd(), FILE_RESTART), ex)) + + result = self.testRunner.run(self.test) + # help garbage collection: we want TestSuite, which hold refs to every + # executed TestCase, to be gc'ed + del self.test + if getattr(result, "debuggers", None) and \ + getattr(self, "pdbmode", None): + start_interactive_mode(result) + if not getattr(self, "batchmode", None): + sys.exit(not result.wasSuccessful()) + self.result = result + + +class SkipAwareTextTestRunner(unittest.TextTestRunner): + + def __init__(self, stream=sys.stderr, verbosity=1, + exitfirst=False, pdbmode=False, cvg=None, test_pattern=None, + skipped_patterns=(), colorize=False, batchmode=False, + options=None): + super(SkipAwareTextTestRunner, self).__init__(stream=stream, + verbosity=verbosity) + self.exitfirst = exitfirst + self.pdbmode = pdbmode + self.cvg = cvg + self.test_pattern = test_pattern + self.skipped_patterns = skipped_patterns + self.colorize = colorize + self.batchmode = batchmode + self.options = options + + def _this_is_skipped(self, testedname): + return any([(pat in testedname) for pat in self.skipped_patterns]) + + def _runcondition(self, test, skipgenerator=True): + if isinstance(test, testlib.InnerTest): + testname = test.name + else: + if isinstance(test, testlib.TestCase): + meth = test._get_test_method() + testname = '%s.%s' % (test.__name__, meth.__name__) + elif isinstance(test, types.FunctionType): + func = test + testname = func.__name__ + elif isinstance(test, types.MethodType): + cls = test.__self__.__class__ + testname = '%s.%s' % (cls.__name__, test.__name__) + 
else: + return True # Not sure when this happens + if isgeneratorfunction(test) and skipgenerator: + return self.does_match_tags(test) # Let inner tests decide at run time + if self._this_is_skipped(testname): + return False # this was explicitly skipped + if self.test_pattern is not None: + try: + classpattern, testpattern = self.test_pattern.split('.') + klass, name = testname.split('.') + if classpattern not in klass or testpattern not in name: + return False + except ValueError: + if self.test_pattern not in testname: + return False + + return self.does_match_tags(test) + + def does_match_tags(self, test): + if self.options is not None: + tags_pattern = getattr(self.options, 'tags_pattern', None) + if tags_pattern is not None: + tags = getattr(test, 'tags', testlib.Tags()) + if tags.inherit and isinstance(test, types.MethodType): + tags = tags | getattr(test.__self__.__class__, 'tags', testlib.Tags()) + return tags.match(tags_pattern) + return True # no pattern + + def _makeResult(self): + return testlib.SkipAwareTestResult(self.stream, self.descriptions, + self.verbosity, self.exitfirst, + self.pdbmode, self.cvg, self.colorize) + + def run(self, test): + "Run the given test case or test suite." 
+ result = self._makeResult() + startTime = time() + test(result, runcondition=self._runcondition, options=self.options) + stopTime = time() + timeTaken = stopTime - startTime + result.printErrors() + if not self.batchmode: + self.stream.writeln(result.separator2) + run = result.testsRun + self.stream.writeln("Ran %d test%s in %.3fs" % + (run, run != 1 and "s" or "", timeTaken)) + self.stream.writeln() + if not result.wasSuccessful(): + if self.colorize: + self.stream.write(textutils.colorize_ansi("FAILED", color='red')) + else: + self.stream.write("FAILED") + else: + if self.colorize: + self.stream.write(textutils.colorize_ansi("OK", color='green')) + else: + self.stream.write("OK") + failed, errored, skipped = map(len, (result.failures, + result.errors, + result.skipped)) + + det_results = [] + for name, value in (("failures", result.failures), + ("errors",result.errors), + ("skipped", result.skipped)): + if value: + det_results.append("%s=%i" % (name, len(value))) + if det_results: + self.stream.write(" (") + self.stream.write(', '.join(det_results)) + self.stream.write(")") + self.stream.writeln("") + return result + +class NonStrictTestLoader(unittest.TestLoader): + """ + Overrides default testloader to be able to omit classname when + specifying tests to run on command line. + + For example, if the file test_foo.py contains :: + + class FooTC(TestCase): + def test_foo1(self): # ... + def test_foo2(self): # ... + def test_bar1(self): # ... + + class BarTC(TestCase): + def test_bar2(self): # ... 
+ + 'python test_foo.py' will run the 3 tests in FooTC + 'python test_foo.py FooTC' will run the 3 tests in FooTC + 'python test_foo.py test_foo' will run test_foo1 and test_foo2 + 'python test_foo.py test_foo1' will run test_foo1 + 'python test_foo.py test_bar' will run FooTC.test_bar1 and BarTC.test_bar2 + """ + + def __init__(self): + self.skipped_patterns = () + + # some magic here to accept empty list by extending + # and to provide callable capability + def loadTestsFromNames(self, names, module=None): + suites = [] + for name in names: + suites.extend(self.loadTestsFromName(name, module)) + return self.suiteClass(suites) + + def _collect_tests(self, module): + tests = {} + for obj in vars(module).values(): + if isclass(obj) and issubclass(obj, unittest.TestCase): + classname = obj.__name__ + if classname[0] == '_' or self._this_is_skipped(classname): + continue + methodnames = [] + # obj is a TestCase class + for attrname in dir(obj): + if attrname.startswith(self.testMethodPrefix): + attr = getattr(obj, attrname) + if callable(attr): + methodnames.append(attrname) + # keep track of class (obj) for convenience + tests[classname] = (obj, methodnames) + return tests + + def loadTestsFromSuite(self, module, suitename): + try: + suite = getattr(module, suitename)() + except AttributeError: + return [] + assert hasattr(suite, '_tests'), \ + "%s.%s is not a valid TestSuite" % (module.__name__, suitename) + # python2.3 does not implement __iter__ on suites, we need to return + # _tests explicitly + return suite._tests + + def loadTestsFromName(self, name, module=None): + parts = name.split('.') + if module is None or len(parts) > 2: + # let the base class do its job here + return [super(NonStrictTestLoader, self).loadTestsFromName(name)] + tests = self._collect_tests(module) + collected = [] + if len(parts) == 1: + pattern = parts[0] + if callable(getattr(module, pattern, None) + ) and pattern not in tests: + # consider it as a suite + return 
self.loadTestsFromSuite(module, pattern) + if pattern in tests: + # case python unittest_foo.py MyTestTC + klass, methodnames = tests[pattern] + for methodname in methodnames: + collected = [klass(methodname) + for methodname in methodnames] + else: + # case python unittest_foo.py something + for klass, methodnames in tests.values(): + # skip methodname if matched by skipped_patterns + for skip_pattern in self.skipped_patterns: + methodnames = [methodname + for methodname in methodnames + if skip_pattern not in methodname] + collected += [klass(methodname) + for methodname in methodnames + if pattern in methodname] + elif len(parts) == 2: + # case "MyClass.test_1" + classname, pattern = parts + klass, methodnames = tests.get(classname, (None, [])) + for methodname in methodnames: + collected = [klass(methodname) for methodname in methodnames + if pattern in methodname] + return collected + + def _this_is_skipped(self, testedname): + return any([(pat in testedname) for pat in self.skipped_patterns]) + + def getTestCaseNames(self, testCaseClass): + """Return a sorted sequence of method names found within testCaseClass + """ + is_skipped = self._this_is_skipped + classname = testCaseClass.__name__ + if classname[0] == '_' or is_skipped(classname): + return [] + testnames = super(NonStrictTestLoader, self).getTestCaseNames( + testCaseClass) + return [testname for testname in testnames if not is_skipped(testname)] + + +# The 2 functions below are modified versions of the TestSuite.run method +# that is provided with unittest2 for python 2.6, in unittest2/suite.py +# It is used to monkeypatch the original implementation to support +# extra runcondition and options arguments (see in testlib.py) + +def _ts_run(self, result, runcondition=None, options=None): + self._wrapped_run(result, runcondition=runcondition, options=options) + self._tearDownPreviousClass(None, result) + self._handleModuleTearDown(result) + return result + +def _ts_wrapped_run(self, result, debug=False, 
runcondition=None, options=None): + for test in self: + if result.shouldStop: + break + if unittest_suite._isnotsuite(test): + self._tearDownPreviousClass(test, result) + self._handleModuleFixture(test, result) + self._handleClassSetUp(test, result) + result._previousTestClass = test.__class__ + if (getattr(test.__class__, '_classSetupFailed', False) or + getattr(result, '_moduleSetUpFailed', False)): + continue + + # --- modifications to deal with _wrapped_run --- + # original code is: + # + # if not debug: + # test(result) + # else: + # test.debug() + if hasattr(test, '_wrapped_run'): + try: + test._wrapped_run(result, debug, runcondition=runcondition, options=options) + except TypeError: + test._wrapped_run(result, debug) + elif not debug: + try: + test(result, runcondition, options) + except TypeError: + test(result) + else: + test.debug() + # --- end of modifications to deal with _wrapped_run --- + return result + +if sys.version_info >= (2, 7): + # The function below implements a modified version of the + # TestSuite.run method that is provided with python 2.7, in + # unittest/suite.py + def _ts_run(self, result, debug=False, runcondition=None, options=None): + topLevel = False + if getattr(result, '_testRunEntered', False) is False: + result._testRunEntered = topLevel = True + + self._wrapped_run(result, debug, runcondition, options) + + if topLevel: + self._tearDownPreviousClass(None, result) + self._handleModuleTearDown(result) + result._testRunEntered = False + return result + + +def enable_dbc(*args): + """ + Without arguments, return True if contracts can be enabled and should be + enabled (see option -d), return False otherwise. + + With arguments, return False if contracts can't or shouldn't be enabled, + otherwise weave ContractAspect with items passed as arguments. 
+ """ + if not ENABLE_DBC: + return False + try: + from logilab.aspects.weaver import weaver + from logilab.aspects.lib.contracts import ContractAspect + except ImportError: + sys.stderr.write( + 'Warning: logilab.aspects is not available. Contracts disabled.') + return False + for arg in args: + weaver.weave_module(arg, ContractAspect) + return True + + +# monkeypatch unittest and doctest (ouch !) +unittest._TextTestResult = testlib.SkipAwareTestResult +unittest.TextTestRunner = SkipAwareTextTestRunner +unittest.TestLoader = NonStrictTestLoader +unittest.TestProgram = SkipAwareTestProgram + +if sys.version_info >= (2, 4): + doctest.DocTestCase.__bases__ = (testlib.TestCase,) + # XXX check python2.6 compatibility + #doctest.DocTestCase._cleanups = [] + #doctest.DocTestCase._out = [] +else: + unittest.FunctionTestCase.__bases__ = (testlib.TestCase,) +unittest.TestSuite.run = _ts_run +unittest.TestSuite._wrapped_run = _ts_wrapped_run diff --git a/pymode/libs/logilab/common/registry.py b/pymode/libs/logilab/common/registry.py new file mode 100644 index 00000000..86a85f94 --- /dev/null +++ b/pymode/libs/logilab/common/registry.py @@ -0,0 +1,1125 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of Logilab-common. +# +# Logilab-common is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# Logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with Logilab-common. If not, see . 
+"""This module provides bases for predicates dispatching (the pattern in use +here is similar to what's refered as multi-dispatch or predicate-dispatch in the +literature, though a bit different since the idea is to select across different +implementation 'e.g. classes), not to dispatch a message to a function or +method. It contains the following classes: + +* :class:`RegistryStore`, the top level object which loads implementation + objects and stores them into registries. You'll usually use it to access + registries and their contained objects; + +* :class:`Registry`, the base class which contains objects semantically grouped + (for instance, sharing a same API, hence the 'implementation' name). You'll + use it to select the proper implementation according to a context. Notice you + may use registries on their own without using the store. + +.. Note:: + + implementation objects are usually designed to be accessed through the + registry and not by direct instantiation, besides to use it as base classe. + +The selection procedure is delegated to a selector, which is responsible for +scoring the object according to some context. At the end of the selection, if an +implementation has been found, an instance of this class is returned. A selector +is built from one or more predicates combined together using AND, OR, NOT +operators (actually `&`, `|` and `~`). You'll thus find some base classes to +build predicates: + +* :class:`Predicate`, the abstract base predicate class + +* :class:`AndPredicate`, :class:`OrPredicate`, :class:`NotPredicate`, which you + shouldn't have to use directly. You'll use `&`, `|` and '~' operators between + predicates directly + +* :func:`objectify_predicate` + +You'll eventually find one concrete predicate: :class:`yes` + +.. autoclass:: RegistryStore +.. autoclass:: Registry + +Predicates +---------- +.. autoclass:: Predicate +.. autofunc:: objectify_predicate +.. autoclass:: yes + +Debugging +--------- +.. 
autoclass:: traced_selection + +Exceptions +---------- +.. autoclass:: RegistryException +.. autoclass:: RegistryNotFound +.. autoclass:: ObjectNotFound +.. autoclass:: NoSelectableObject +""" + +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import types +import weakref +import traceback as tb +from os import listdir, stat +from os.path import join, isdir, exists +from logging import getLogger +from warnings import warn + +from six import string_types, add_metaclass + +from logilab.common.modutils import modpath_from_file +from logilab.common.logging_ext import set_log_methods +from logilab.common.decorators import classproperty + + +class RegistryException(Exception): + """Base class for registry exception.""" + +class RegistryNotFound(RegistryException): + """Raised when an unknown registry is requested. + + This is usually a programming/typo error. + """ + +class ObjectNotFound(RegistryException): + """Raised when an unregistered object is requested. + + This may be a programming/typo or a misconfiguration error. + """ + +class NoSelectableObject(RegistryException): + """Raised when no object is selectable for a given context.""" + def __init__(self, args, kwargs, objects): + self.args = args + self.kwargs = kwargs + self.objects = objects + + def __str__(self): + return ('args: %s, kwargs: %s\ncandidates: %s' + % (self.args, self.kwargs.keys(), self.objects)) + +class SelectAmbiguity(RegistryException): + """Raised when several objects compete at selection time with an equal + score. + + """ + + +def _modname_from_path(path, extrapath=None): + modpath = modpath_from_file(path, extrapath) + # omit '__init__' from package's name to avoid loading that module + # once for each name when it is imported by some other object + # module. This supposes import in modules are done as:: + # + # from package import something + # + # not:: + # + # from package.__init__ import something + # + # which seems quite correct. 
+ if modpath[-1] == '__init__': + modpath.pop() + return '.'.join(modpath) + + +def _toload_info(path, extrapath, _toload=None): + """Return a dictionary of : and an ordered list of + (file, module name) to load + """ + if _toload is None: + assert isinstance(path, list) + _toload = {}, [] + for fileordir in path: + if isdir(fileordir) and exists(join(fileordir, '__init__.py')): + subfiles = [join(fileordir, fname) for fname in listdir(fileordir)] + _toload_info(subfiles, extrapath, _toload) + elif fileordir[-3:] == '.py': + modname = _modname_from_path(fileordir, extrapath) + _toload[0][modname] = fileordir + _toload[1].append((fileordir, modname)) + return _toload + + +class RegistrableObject(object): + """This is the base class for registrable objects which are selected + according to a context. + + :attr:`__registry__` + name of the registry for this object (string like 'views', + 'templates'...). You may want to define `__registries__` directly if your + object should be registered in several registries. + + :attr:`__regid__` + object's identifier in the registry (string like 'main', + 'primary', 'folder_box') + + :attr:`__select__` + class'selector + + Moreover, the `__abstract__` attribute may be set to True to indicate that a + class is abstract and should not be registered. + + You don't have to inherit from this class to put it in a registry (having + `__regid__` and `__select__` is enough), though this is needed for classes + that should be automatically registered. + """ + + __registry__ = None + __regid__ = None + __select__ = None + __abstract__ = True # see doc snipppets below (in Registry class) + + @classproperty + def __registries__(cls): + if cls.__registry__ is None: + return () + return (cls.__registry__,) + + +class RegistrableInstance(RegistrableObject): + """Inherit this class if you want instances of the classes to be + automatically registered. 
+ """ + + def __new__(cls, *args, **kwargs): + """Add a __module__ attribute telling the module where the instance was + created, for automatic registration. + """ + obj = super(RegistrableInstance, cls).__new__(cls) + # XXX subclass must no override __new__ + filepath = tb.extract_stack(limit=2)[0][0] + obj.__module__ = _modname_from_path(filepath) + return obj + + +class Registry(dict): + """The registry store a set of implementations associated to identifier: + + * to each identifier are associated a list of implementations + + * to select an implementation of a given identifier, you should use one of the + :meth:`select` or :meth:`select_or_none` method + + * to select a list of implementations for a context, you should use the + :meth:`possible_objects` method + + * dictionary like access to an identifier will return the bare list of + implementations for this identifier. + + To be usable in a registry, the only requirement is to have a `__select__` + attribute. + + At the end of the registration process, the :meth:`__registered__` + method is called on each registered object which have them, given the + registry in which it's registered as argument. + + Registration methods: + + .. automethod: register + .. automethod: unregister + + Selection methods: + + .. automethod: select + .. automethod: select_or_none + .. automethod: possible_objects + .. 
automethod: object_by_id + """ + def __init__(self, debugmode): + super(Registry, self).__init__() + self.debugmode = debugmode + + def __getitem__(self, name): + """return the registry (list of implementation objects) associated to + this name + """ + try: + return super(Registry, self).__getitem__(name) + except KeyError: + exc = ObjectNotFound(name) + exc.__traceback__ = sys.exc_info()[-1] + raise exc + + @classmethod + def objid(cls, obj): + """returns a unique identifier for an object stored in the registry""" + return '%s.%s' % (obj.__module__, cls.objname(obj)) + + @classmethod + def objname(cls, obj): + """returns a readable name for an object stored in the registry""" + return getattr(obj, '__name__', id(obj)) + + def initialization_completed(self): + """call method __registered__() on registered objects when the callback + is defined""" + for objects in self.values(): + for objectcls in objects: + registered = getattr(objectcls, '__registered__', None) + if registered: + registered(self) + if self.debugmode: + wrap_predicates(_lltrace) + + def register(self, obj, oid=None, clear=False): + """base method to add an object in the registry""" + assert not '__abstract__' in obj.__dict__, obj + assert obj.__select__, obj + oid = oid or obj.__regid__ + assert oid, ('no explicit name supplied to register object %s, ' + 'which has no __regid__ set' % obj) + if clear: + objects = self[oid] = [] + else: + objects = self.setdefault(oid, []) + assert not obj in objects, 'object %s is already registered' % obj + objects.append(obj) + + def register_and_replace(self, obj, replaced): + """remove and register """ + # XXXFIXME this is a duplication of unregister() + # remove register_and_replace in favor of unregister + register + # or simplify by calling unregister then register here + if not isinstance(replaced, string_types): + replaced = self.objid(replaced) + # prevent from misspelling + assert obj is not replaced, 'replacing an object by itself: %s' % obj + 
registered_objs = self.get(obj.__regid__, ()) + for index, registered in enumerate(registered_objs): + if self.objid(registered) == replaced: + del registered_objs[index] + break + else: + self.warning('trying to replace %s that is not registered with %s', + replaced, obj) + self.register(obj) + + def unregister(self, obj): + """remove object from this registry""" + objid = self.objid(obj) + oid = obj.__regid__ + for registered in self.get(oid, ()): + # use self.objid() to compare objects because vreg will probably + # have its own version of the object, loaded through execfile + if self.objid(registered) == objid: + self[oid].remove(registered) + break + else: + self.warning('can\'t remove %s, no id %s in the registry', + objid, oid) + + def all_objects(self): + """return a list containing all objects in this registry. + """ + result = [] + for objs in self.values(): + result += objs + return result + + # dynamic selection methods ################################################ + + def object_by_id(self, oid, *args, **kwargs): + """return object with the `oid` identifier. Only one object is expected + to be found. + + raise :exc:`ObjectNotFound` if there are no object with id `oid` in this + registry + + raise :exc:`AssertionError` if there is more than one object there + """ + objects = self[oid] + assert len(objects) == 1, objects + return objects[0](*args, **kwargs) + + def select(self, __oid, *args, **kwargs): + """return the most specific object among those with the given oid + according to the given context. 
+ + raise :exc:`ObjectNotFound` if there are no object with id `oid` in this + registry + + raise :exc:`NoSelectableObject` if no object can be selected + """ + obj = self._select_best(self[__oid], *args, **kwargs) + if obj is None: + raise NoSelectableObject(args, kwargs, self[__oid] ) + return obj + + def select_or_none(self, __oid, *args, **kwargs): + """return the most specific object among those with the given oid + according to the given context, or None if no object applies. + """ + try: + return self._select_best(self[__oid], *args, **kwargs) + except ObjectNotFound: + return None + + def possible_objects(self, *args, **kwargs): + """return an iterator on possible objects in this registry for the given + context + """ + for objects in self.values(): + obj = self._select_best(objects, *args, **kwargs) + if obj is None: + continue + yield obj + + def _select_best(self, objects, *args, **kwargs): + """return an instance of the most specific object according + to parameters + + return None if not object apply (don't raise `NoSelectableObject` since + it's costly when searching objects using `possible_objects` + (e.g. searching for hooks). 
+ """ + score, winners = 0, None + for obj in objects: + objectscore = obj.__select__(obj, *args, **kwargs) + if objectscore > score: + score, winners = objectscore, [obj] + elif objectscore > 0 and objectscore == score: + winners.append(obj) + if winners is None: + return None + if len(winners) > 1: + # log in production environement / test, error while debugging + msg = 'select ambiguity: %s\n(args: %s, kwargs: %s)' + if self.debugmode: + # raise bare exception in debug mode + raise SelectAmbiguity(msg % (winners, args, kwargs.keys())) + self.error(msg, winners, args, kwargs.keys()) + # return the result of calling the object + return self.selected(winners[0], args, kwargs) + + def selected(self, winner, args, kwargs): + """override here if for instance you don't want "instanciation" + """ + return winner(*args, **kwargs) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None + + +def obj_registries(cls, registryname=None): + """return a tuple of registry names (see __registries__)""" + if registryname: + return (registryname,) + return cls.__registries__ + + +class RegistryStore(dict): + """This class is responsible for loading objects and storing them + in their registry which is created on the fly as needed. + + It handles dynamic registration of objects and provides a + convenient api to access them. To be recognized as an object that + should be stored into one of the store's registry + (:class:`Registry`), an object must provide the following + attributes, used control how they interact with the registry: + + :attr:`__registries__` + list of registry names (string like 'views', 'templates'...) 
into which + the object should be registered + + :attr:`__regid__` + object identifier in the registry (string like 'main', + 'primary', 'folder_box') + + :attr:`__select__` + the object predicate selectors + + Moreover, the :attr:`__abstract__` attribute may be set to `True` + to indicate that an object is abstract and should not be registered + (such inherited attributes not considered). + + .. Note:: + + When using the store to load objects dynamically, you *always* have + to use **super()** to get the methods and attributes of the + superclasses, and not use the class identifier. If not, you'll get into + trouble at reload time. + + For example, instead of writing:: + + class Thing(Parent): + __regid__ = 'athing' + __select__ = yes() + + def f(self, arg1): + Parent.f(self, arg1) + + You must write:: + + class Thing(Parent): + __regid__ = 'athing' + __select__ = yes() + + def f(self, arg1): + super(Thing, self).f(arg1) + + Controlling object registration + ------------------------------- + + Dynamic loading is triggered by calling the + :meth:`register_objects` method, given a list of directories to + inspect for python modules. + + .. automethod: register_objects + + For each module, by default, all compatible objects are registered + automatically. However if some objects come as replacement of + other objects, or have to be included only if some condition is + met, you'll have to define a `registration_callback(vreg)` + function in the module and explicitly register **all objects** in + this module, using the api defined below. + + + .. automethod:: RegistryStore.register_all + .. automethod:: RegistryStore.register_and_replace + .. automethod:: RegistryStore.register + .. automethod:: RegistryStore.unregister + + .. Note:: + Once the function `registration_callback(vreg)` is implemented in a + module, all the objects from this module have to be explicitly + registered as it disables the automatic object registration. + + + Examples: + + .. 
sourcecode:: python + + def registration_callback(store): + # register everything in the module except BabarClass + store.register_all(globals().values(), __name__, (BabarClass,)) + + # conditionally register BabarClass + if 'babar_relation' in store.schema: + store.register(BabarClass) + + In this example, we register all application object classes defined in the module + except `BabarClass`. This class is then registered only if the 'babar_relation' + relation type is defined in the instance schema. + + .. sourcecode:: python + + def registration_callback(store): + store.register(Elephant) + # replace Babar by Celeste + store.register_and_replace(Celeste, Babar) + + In this example, we explicitly register classes one by one: + + * the `Elephant` class + * the `Celeste` to replace `Babar` + + If at some point we register a new appobject class in this module, it won't be + registered at all without modification to the `registration_callback` + implementation. The first example will register it though, thanks to the call + to the `register_all` method. + + Controlling registry instantiation + ---------------------------------- + + The `REGISTRY_FACTORY` class dictionary allows to specify which class should + be instantiated for a given registry name. The class associated to `None` + key will be the class used when there is no specific class for a name. 
+ """ + + def __init__(self, debugmode=False): + super(RegistryStore, self).__init__() + self.debugmode = debugmode + + def reset(self): + """clear all registries managed by this store""" + # don't use self.clear, we want to keep existing subdictionaries + for subdict in self.values(): + subdict.clear() + self._lastmodifs = {} + + def __getitem__(self, name): + """return the registry (dictionary of class objects) associated to + this name + """ + try: + return super(RegistryStore, self).__getitem__(name) + except KeyError: + exc = RegistryNotFound(name) + exc.__traceback__ = sys.exc_info()[-1] + raise exc + + # methods for explicit (un)registration ################################### + + # default class, when no specific class set + REGISTRY_FACTORY = {None: Registry} + + def registry_class(self, regid): + """return existing registry named regid or use factory to create one and + return it""" + try: + return self.REGISTRY_FACTORY[regid] + except KeyError: + return self.REGISTRY_FACTORY[None] + + def setdefault(self, regid): + try: + return self[regid] + except RegistryNotFound: + self[regid] = self.registry_class(regid)(self.debugmode) + return self[regid] + + def register_all(self, objects, modname, butclasses=()): + """register registrable objects into `objects`. + + Registrable objects are properly configured subclasses of + :class:`RegistrableObject`. Objects which are not defined in the module + `modname` or which are in `butclasses` won't be registered. + + Typical usage is: + + .. sourcecode:: python + + store.register_all(globals().values(), __name__, (ClassIWantToRegisterExplicitly,)) + + So you get partially automatic registration, keeping manual registration + for some object (to use + :meth:`~logilab.common.registry.RegistryStore.register_and_replace` for + instance). 
+ """ + assert isinstance(modname, string_types), \ + 'modname expected to be a module name (ie string), got %r' % modname + for obj in objects: + if self.is_registrable(obj) and obj.__module__ == modname and not obj in butclasses: + if isinstance(obj, type): + self._load_ancestors_then_object(modname, obj, butclasses) + else: + self.register(obj) + + def register(self, obj, registryname=None, oid=None, clear=False): + """register `obj` implementation into `registryname` or + `obj.__registries__` if not specified, with identifier `oid` or + `obj.__regid__` if not specified. + + If `clear` is true, all objects with the same identifier will be + previously unregistered. + """ + assert not obj.__dict__.get('__abstract__'), obj + for registryname in obj_registries(obj, registryname): + registry = self.setdefault(registryname) + registry.register(obj, oid=oid, clear=clear) + self.debug("register %s in %s['%s']", + registry.objname(obj), registryname, oid or obj.__regid__) + self._loadedmods.setdefault(obj.__module__, {})[registry.objid(obj)] = obj + + def unregister(self, obj, registryname=None): + """unregister `obj` object from the registry `registryname` or + `obj.__registries__` if not specified. + """ + for registryname in obj_registries(obj, registryname): + registry = self[registryname] + registry.unregister(obj) + self.debug("unregister %s from %s['%s']", + registry.objname(obj), registryname, obj.__regid__) + + def register_and_replace(self, obj, replaced, registryname=None): + """register `obj` object into `registryname` or + `obj.__registries__` if not specified. If found, the `replaced` object + will be unregistered first (else a warning will be issued as it is + generally unexpected). 
+ """ + for registryname in obj_registries(obj, registryname): + registry = self[registryname] + registry.register_and_replace(obj, replaced) + self.debug("register %s in %s['%s'] instead of %s", + registry.objname(obj), registryname, obj.__regid__, + registry.objname(replaced)) + + # initialization methods ################################################### + + def init_registration(self, path, extrapath=None): + """reset registry and walk down path to return list of (path, name) + file modules to be loaded""" + # XXX make this private by renaming it to _init_registration ? + self.reset() + # compute list of all modules that have to be loaded + self._toloadmods, filemods = _toload_info(path, extrapath) + # XXX is _loadedmods still necessary ? It seems like it's useful + # to avoid loading same module twice, especially with the + # _load_ancestors_then_object logic but this needs to be checked + self._loadedmods = {} + return filemods + + def register_objects(self, path, extrapath=None): + """register all objects found walking down """ + # load views from each directory in the instance's path + # XXX inline init_registration ? + filemods = self.init_registration(path, extrapath) + for filepath, modname in filemods: + self.load_file(filepath, modname) + self.initialization_completed() + + def initialization_completed(self): + """call initialization_completed() on all known registries""" + for reg in self.values(): + reg.initialization_completed() + + def _mdate(self, filepath): + """ return the modification date of a file path """ + try: + return stat(filepath)[-2] + except OSError: + # this typically happens on emacs backup files (.#foo.py) + self.warning('Unable to load %s. 
It is likely to be a backup file', + filepath) + return None + + def is_reload_needed(self, path): + """return True if something module changed and the registry should be + reloaded + """ + lastmodifs = self._lastmodifs + for fileordir in path: + if isdir(fileordir) and exists(join(fileordir, '__init__.py')): + if self.is_reload_needed([join(fileordir, fname) + for fname in listdir(fileordir)]): + return True + elif fileordir[-3:] == '.py': + mdate = self._mdate(fileordir) + if mdate is None: + continue # backup file, see _mdate implementation + elif "flymake" in fileordir: + # flymake + pylint in use, don't consider these they will corrupt the registry + continue + if fileordir not in lastmodifs or lastmodifs[fileordir] < mdate: + self.info('File %s changed since last visit', fileordir) + return True + return False + + def load_file(self, filepath, modname): + """ load registrable objects (if any) from a python file """ + from logilab.common.modutils import load_module_from_name + if modname in self._loadedmods: + return + self._loadedmods[modname] = {} + mdate = self._mdate(filepath) + if mdate is None: + return # backup file, see _mdate implementation + elif "flymake" in filepath: + # flymake + pylint in use, don't consider these they will corrupt the registry + return + # set update time before module loading, else we get some reloading + # weirdness in case of syntax error or other error while importing the + # module + self._lastmodifs[filepath] = mdate + # load the module + module = load_module_from_name(modname) + self.load_module(module) + + def load_module(self, module): + """Automatically handle module objects registration. 
+ + Instances are registered as soon as they are hashable and have the + following attributes: + + * __regid__ (a string) + * __select__ (a callable) + * __registries__ (a tuple/list of string) + + For classes this is a bit more complicated : + + - first ensure parent classes are already registered + + - class with __abstract__ == True in their local dictionary are skipped + + - object class needs to have registries and identifier properly set to a + non empty string to be registered. + """ + self.info('loading %s from %s', module.__name__, module.__file__) + if hasattr(module, 'registration_callback'): + module.registration_callback(self) + else: + self.register_all(vars(module).values(), module.__name__) + + def _load_ancestors_then_object(self, modname, objectcls, butclasses=()): + """handle class registration according to rules defined in + :meth:`load_module` + """ + # backward compat, we used to allow whatever else than classes + if not isinstance(objectcls, type): + if self.is_registrable(objectcls) and objectcls.__module__ == modname: + self.register(objectcls) + return + # imported classes + objmodname = objectcls.__module__ + if objmodname != modname: + # The module of the object is not the same as the currently + # worked on module, or this is actually an instance, which + # has no module at all + if objmodname in self._toloadmods: + # if this is still scheduled for loading, let's proceed immediately, + # but using the object module + self.load_file(self._toloadmods[objmodname], objmodname) + return + # ensure object hasn't been already processed + clsid = '%s.%s' % (modname, objectcls.__name__) + if clsid in self._loadedmods[modname]: + return + self._loadedmods[modname][clsid] = objectcls + # ensure ancestors are registered + for parent in objectcls.__bases__: + self._load_ancestors_then_object(modname, parent, butclasses) + # ensure object is registrable + if objectcls in butclasses or not self.is_registrable(objectcls): + return + # backward compat + 
reg = self.setdefault(obj_registries(objectcls)[0]) + if reg.objname(objectcls)[0] == '_': + warn("[lgc 0.59] object whose name start with '_' won't be " + "skipped anymore at some point, use __abstract__ = True " + "instead (%s)" % objectcls, DeprecationWarning) + return + # register, finally + self.register(objectcls) + + @classmethod + def is_registrable(cls, obj): + """ensure `obj` should be registered + + as arbitrary stuff may be registered, do a lot of check and warn about + weird cases (think to dumb proxy objects) + """ + if isinstance(obj, type): + if not issubclass(obj, RegistrableObject): + # ducktyping backward compat + if not (getattr(obj, '__registries__', None) + and getattr(obj, '__regid__', None) + and getattr(obj, '__select__', None)): + return False + elif issubclass(obj, RegistrableInstance): + return False + elif not isinstance(obj, RegistrableInstance): + return False + if not obj.__regid__: + return False # no regid + registries = obj.__registries__ + if not registries: + return False # no registries + selector = obj.__select__ + if not selector: + return False # no selector + if obj.__dict__.get('__abstract__', False): + return False + # then detect potential problems that should be warned + if not isinstance(registries, (tuple, list)): + cls.warning('%s has __registries__ which is not a list or tuple', obj) + return False + if not callable(selector): + cls.warning('%s has not callable __select__', obj) + return False + return True + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None + + +# init logging +set_log_methods(RegistryStore, getLogger('registry.store')) +set_log_methods(Registry, getLogger('registry')) + + +# helpers for debugging selectors +TRACED_OIDS = None + +def _trace_selector(cls, selector, args, ret): + vobj = args[0] + if TRACED_OIDS == 'all' or vobj.__regid__ in TRACED_OIDS: + 
print('%s -> %s for %s(%s)' % (cls, ret, vobj, vobj.__regid__)) + +def _lltrace(selector): + """use this decorator on your predicates so they become traceable with + :class:`traced_selection` + """ + def traced(cls, *args, **kwargs): + ret = selector(cls, *args, **kwargs) + if TRACED_OIDS is not None: + _trace_selector(cls, selector, args, ret) + return ret + traced.__name__ = selector.__name__ + traced.__doc__ = selector.__doc__ + return traced + +class traced_selection(object): # pylint: disable=C0103 + """ + Typical usage is : + + .. sourcecode:: python + + >>> from logilab.common.registry import traced_selection + >>> with traced_selection(): + ... # some code in which you want to debug selectors + ... # for all objects + + This will yield lines like this in the logs:: + + selector one_line_rset returned 0 for + + You can also give to :class:`traced_selection` the identifiers of objects on + which you want to debug selection ('oid1' and 'oid2' in the example above). + + .. sourcecode:: python + + >>> with traced_selection( ('regid1', 'regid2') ): + ... # some code in which you want to debug selectors + ... # for objects with __regid__ 'regid1' and 'regid2' + + A potentially useful point to set up such a tracing function is + the `logilab.common.registry.Registry.select` method body. + """ + + def __init__(self, traced='all'): + self.traced = traced + + def __enter__(self): + global TRACED_OIDS + TRACED_OIDS = self.traced + + def __exit__(self, exctype, exc, traceback): + global TRACED_OIDS + TRACED_OIDS = None + return traceback is None + +# selector base classes and operations ######################################## + +def objectify_predicate(selector_func): + """Most of the time, a simple score function is enough to build a selector. 
+ The :func:`objectify_predicate` decorator turn it into a proper selector + class:: + + @objectify_predicate + def one(cls, req, rset=None, **kwargs): + return 1 + + class MyView(View): + __select__ = View.__select__ & one() + + """ + return type(selector_func.__name__, (Predicate,), + {'__doc__': selector_func.__doc__, + '__call__': lambda self, *a, **kw: selector_func(*a, **kw)}) + + +_PREDICATES = {} + +def wrap_predicates(decorator): + for predicate in _PREDICATES.values(): + if not '_decorators' in predicate.__dict__: + predicate._decorators = set() + if decorator in predicate._decorators: + continue + predicate._decorators.add(decorator) + predicate.__call__ = decorator(predicate.__call__) + +class PredicateMetaClass(type): + def __new__(mcs, *args, **kwargs): + # use __new__ so subclasses doesn't have to call Predicate.__init__ + inst = type.__new__(mcs, *args, **kwargs) + proxy = weakref.proxy(inst, lambda p: _PREDICATES.pop(id(p))) + _PREDICATES[id(proxy)] = proxy + return inst + + +@add_metaclass(PredicateMetaClass) +class Predicate(object): + """base class for selector classes providing implementation + for operators ``&``, ``|`` and ``~`` + + This class is only here to give access to binary operators, the selector + logic itself should be implemented in the :meth:`__call__` method. Notice it + should usually accept any arbitrary arguments (the context), though that may + vary depending on your usage of the registry. + + a selector is called to help choosing the correct object for a + particular context by returning a score (`int`) telling how well + the implementation given as first argument fit to the given context. + + 0 score means that the class doesn't apply. + """ + + @property + def func_name(self): + # backward compatibility + return self.__class__.__name__ + + def search_selector(self, selector): + """search for the given selector, selector instance or tuple of + selectors in the selectors tree. Return None if not found. 
+ """ + if self is selector: + return self + if (isinstance(selector, type) or isinstance(selector, tuple)) and \ + isinstance(self, selector): + return self + return None + + def __str__(self): + return self.__class__.__name__ + + def __and__(self, other): + return AndPredicate(self, other) + def __rand__(self, other): + return AndPredicate(other, self) + def __iand__(self, other): + return AndPredicate(self, other) + def __or__(self, other): + return OrPredicate(self, other) + def __ror__(self, other): + return OrPredicate(other, self) + def __ior__(self, other): + return OrPredicate(self, other) + + def __invert__(self): + return NotPredicate(self) + + # XXX (function | function) or (function & function) not managed yet + + def __call__(self, cls, *args, **kwargs): + return NotImplementedError("selector %s must implement its logic " + "in its __call__ method" % self.__class__) + + def __repr__(self): + return u'' % (self.__class__.__name__, id(self)) + + +class MultiPredicate(Predicate): + """base class for compound selector classes""" + + def __init__(self, *selectors): + self.selectors = self.merge_selectors(selectors) + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(str(s) for s in self.selectors)) + + @classmethod + def merge_selectors(cls, selectors): + """deal with selector instanciation when necessary and merge + multi-selectors if possible: + + AndPredicate(AndPredicate(sel1, sel2), AndPredicate(sel3, sel4)) + ==> AndPredicate(sel1, sel2, sel3, sel4) + """ + merged_selectors = [] + for selector in selectors: + # XXX do we really want magic-transformations below? + # if so, wanna warn about them? 
+ if isinstance(selector, types.FunctionType): + selector = objectify_predicate(selector)() + if isinstance(selector, type) and issubclass(selector, Predicate): + selector = selector() + assert isinstance(selector, Predicate), selector + if isinstance(selector, cls): + merged_selectors += selector.selectors + else: + merged_selectors.append(selector) + return merged_selectors + + def search_selector(self, selector): + """search for the given selector or selector instance (or tuple of + selectors) in the selectors tree. Return None if not found + """ + for childselector in self.selectors: + if childselector is selector: + return childselector + found = childselector.search_selector(selector) + if found is not None: + return found + # if not found in children, maybe we are looking for self? + return super(MultiPredicate, self).search_selector(selector) + + +class AndPredicate(MultiPredicate): + """and-chained selectors""" + def __call__(self, cls, *args, **kwargs): + score = 0 + for selector in self.selectors: + partscore = selector(cls, *args, **kwargs) + if not partscore: + return 0 + score += partscore + return score + + +class OrPredicate(MultiPredicate): + """or-chained selectors""" + def __call__(self, cls, *args, **kwargs): + for selector in self.selectors: + partscore = selector(cls, *args, **kwargs) + if partscore: + return partscore + return 0 + +class NotPredicate(Predicate): + """negation selector""" + def __init__(self, selector): + self.selector = selector + + def __call__(self, cls, *args, **kwargs): + score = self.selector(cls, *args, **kwargs) + return int(not score) + + def __str__(self): + return 'NOT(%s)' % self.selector + + +class yes(Predicate): # pylint: disable=C0103 + """Return the score given as parameter, with a default score of 0.5 so any + other selector take precedence. + + Usually used for objects which can be selected whatever the context, or + also sometimes to add arbitrary points to a score. 
+ + Take care, `yes(0)` could be named 'no'... + """ + def __init__(self, score=0.5): + self.score = score + + def __call__(self, *args, **kwargs): + return self.score + + +# deprecated stuff ############################################################# + +from logilab.common.deprecation import deprecated + +@deprecated('[lgc 0.59] use Registry.objid class method instead') +def classid(cls): + return '%s.%s' % (cls.__module__, cls.__name__) + +@deprecated('[lgc 0.59] use obj_registries function instead') +def class_registries(cls, registryname): + return obj_registries(cls, registryname) + diff --git a/pymode/libs/logilab/common/shellutils.py b/pymode/libs/logilab/common/shellutils.py new file mode 100644 index 00000000..4e689560 --- /dev/null +++ b/pymode/libs/logilab/common/shellutils.py @@ -0,0 +1,462 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""shell/term utilities, useful to write some python scripts instead of shell +scripts. 
+""" + +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import os +import glob +import shutil +import stat +import sys +import tempfile +import time +import fnmatch +import errno +import string +import random +import subprocess +from os.path import exists, isdir, islink, basename, join + +from six import string_types +from six.moves import range, input as raw_input + +from logilab.common import STD_BLACKLIST, _handle_blacklist +from logilab.common.compat import str_to_bytes +from logilab.common.deprecation import deprecated + +try: + from logilab.common.proc import ProcInfo, NoSuchProcess +except ImportError: + # windows platform + class NoSuchProcess(Exception): pass + + def ProcInfo(pid): + raise NoSuchProcess() + + +class tempdir(object): + + def __enter__(self): + self.path = tempfile.mkdtemp() + return self.path + + def __exit__(self, exctype, value, traceback): + # rmtree in all cases + shutil.rmtree(self.path) + return traceback is None + + +class pushd(object): + def __init__(self, directory): + self.directory = directory + + def __enter__(self): + self.cwd = os.getcwd() + os.chdir(self.directory) + return self.directory + + def __exit__(self, exctype, value, traceback): + os.chdir(self.cwd) + + +def chown(path, login=None, group=None): + """Same as `os.chown` function but accepting user login or group name as + argument. If login or group is omitted, it's left unchanged. + + Note: you must own the file to chown it (or be root). Otherwise OSError is raised. + """ + if login is None: + uid = -1 + else: + try: + uid = int(login) + except ValueError: + import pwd # Platforms: Unix + uid = pwd.getpwnam(login).pw_uid + if group is None: + gid = -1 + else: + try: + gid = int(group) + except ValueError: + import grp + gid = grp.getgrnam(group).gr_gid + os.chown(path, uid, gid) + +def mv(source, destination, _action=shutil.move): + """A shell-like mv, supporting wildcards. 
+ """ + sources = glob.glob(source) + if len(sources) > 1: + assert isdir(destination) + for filename in sources: + _action(filename, join(destination, basename(filename))) + else: + try: + source = sources[0] + except IndexError: + raise OSError('No file matching %s' % source) + if isdir(destination) and exists(destination): + destination = join(destination, basename(source)) + try: + _action(source, destination) + except OSError as ex: + raise OSError('Unable to move %r to %r (%s)' % ( + source, destination, ex)) + +def rm(*files): + """A shell-like rm, supporting wildcards. + """ + for wfile in files: + for filename in glob.glob(wfile): + if islink(filename): + os.remove(filename) + elif isdir(filename): + shutil.rmtree(filename) + else: + os.remove(filename) + +def cp(source, destination): + """A shell-like cp, supporting wildcards. + """ + mv(source, destination, _action=shutil.copy) + +def find(directory, exts, exclude=False, blacklist=STD_BLACKLIST): + """Recursively find files ending with the given extensions from the directory. 
+ + :type directory: str + :param directory: + directory where the search should start + + :type exts: basestring or list or tuple + :param exts: + extensions or lists or extensions to search + + :type exclude: boolean + :param exts: + if this argument is True, returning files NOT ending with the given + extensions + + :type blacklist: list or tuple + :param blacklist: + optional list of files or directory to ignore, default to the value of + `logilab.common.STD_BLACKLIST` + + :rtype: list + :return: + the list of all matching files + """ + if isinstance(exts, string_types): + exts = (exts,) + if exclude: + def match(filename, exts): + for ext in exts: + if filename.endswith(ext): + return False + return True + else: + def match(filename, exts): + for ext in exts: + if filename.endswith(ext): + return True + return False + files = [] + for dirpath, dirnames, filenames in os.walk(directory): + _handle_blacklist(blacklist, dirnames, filenames) + # don't append files if the directory is blacklisted + dirname = basename(dirpath) + if dirname in blacklist: + continue + files.extend([join(dirpath, f) for f in filenames if match(f, exts)]) + return files + + +def globfind(directory, pattern, blacklist=STD_BLACKLIST): + """Recursively finds files matching glob `pattern` under `directory`. + + This is an alternative to `logilab.common.shellutils.find`. + + :type directory: str + :param directory: + directory where the search should start + + :type pattern: basestring + :param pattern: + the glob pattern (e.g *.py, foo*.py, etc.) 
+ + :type blacklist: list or tuple + :param blacklist: + optional list of files or directory to ignore, default to the value of + `logilab.common.STD_BLACKLIST` + + :rtype: iterator + :return: + iterator over the list of all matching files + """ + for curdir, dirnames, filenames in os.walk(directory): + _handle_blacklist(blacklist, dirnames, filenames) + for fname in fnmatch.filter(filenames, pattern): + yield join(curdir, fname) + +def unzip(archive, destdir): + import zipfile + if not exists(destdir): + os.mkdir(destdir) + zfobj = zipfile.ZipFile(archive) + for name in zfobj.namelist(): + if name.endswith('/'): + os.mkdir(join(destdir, name)) + else: + outfile = open(join(destdir, name), 'wb') + outfile.write(zfobj.read(name)) + outfile.close() + + +class Execute: + """This is a deadlock safe version of popen2 (no stdin), that returns + an object with errorlevel, out and err. + """ + + def __init__(self, command): + cmd = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + self.out, self.err = cmd.communicate() + self.status = os.WEXITSTATUS(cmd.returncode) + +Execute = deprecated('Use subprocess.Popen instead')(Execute) + + +def acquire_lock(lock_file, max_try=10, delay=10, max_delay=3600): + """Acquire a lock represented by a file on the file system + + If the process written in lock file doesn't exist anymore, we remove the + lock file immediately + If age of the lock_file is greater than max_delay, then we raise a UserWarning + """ + count = abs(max_try) + while count: + try: + fd = os.open(lock_file, os.O_EXCL | os.O_RDWR | os.O_CREAT) + os.write(fd, str_to_bytes(str(os.getpid())) ) + os.close(fd) + return True + except OSError as e: + if e.errno == errno.EEXIST: + try: + fd = open(lock_file, "r") + pid = int(fd.readline()) + pi = ProcInfo(pid) + age = (time.time() - os.stat(lock_file)[stat.ST_MTIME]) + if age / max_delay > 1 : + raise UserWarning("Command '%s' (pid %s) has locked the " + "file '%s' for %s minutes" + % 
(pi.name(), pid, lock_file, age/60)) + except UserWarning: + raise + except NoSuchProcess: + os.remove(lock_file) + except Exception: + # The try block is not essential. can be skipped. + # Note: ProcInfo object is only available for linux + # process information are not accessible... + # or lock_file is no more present... + pass + else: + raise + count -= 1 + time.sleep(delay) + else: + raise Exception('Unable to acquire %s' % lock_file) + +def release_lock(lock_file): + """Release a lock represented by a file on the file system.""" + os.remove(lock_file) + + +class ProgressBar(object): + """A simple text progression bar.""" + + def __init__(self, nbops, size=20, stream=sys.stdout, title=''): + if title: + self._fstr = '\r%s [%%-%ss]' % (title, int(size)) + else: + self._fstr = '\r[%%-%ss]' % int(size) + self._stream = stream + self._total = nbops + self._size = size + self._current = 0 + self._progress = 0 + self._current_text = None + self._last_text_write_size = 0 + + def _get_text(self): + return self._current_text + + def _set_text(self, text=None): + if text != self._current_text: + self._current_text = text + self.refresh() + + def _del_text(self): + self.text = None + + text = property(_get_text, _set_text, _del_text) + + def update(self, offset=1, exact=False): + """Move FORWARD to new cursor position (cursor will never go backward). 
+ + :offset: fraction of ``size`` + + :exact: + + - False: offset relative to current cursor position if True + - True: offset as an asbsolute position + + """ + if exact: + self._current = offset + else: + self._current += offset + + progress = int((float(self._current)/float(self._total))*self._size) + if progress > self._progress: + self._progress = progress + self.refresh() + + def refresh(self): + """Refresh the progression bar display.""" + self._stream.write(self._fstr % ('=' * min(self._progress, self._size)) ) + if self._last_text_write_size or self._current_text: + template = ' %%-%is' % (self._last_text_write_size) + text = self._current_text + if text is None: + text = '' + self._stream.write(template % text) + self._last_text_write_size = len(text.rstrip()) + self._stream.flush() + + def finish(self): + self._stream.write('\n') + self._stream.flush() + + +class DummyProgressBar(object): + __slot__ = ('text',) + + def refresh(self): + pass + def update(self): + pass + def finish(self): + pass + + +_MARKER = object() +class progress(object): + + def __init__(self, nbops=_MARKER, size=_MARKER, stream=_MARKER, title=_MARKER, enabled=True): + self.nbops = nbops + self.size = size + self.stream = stream + self.title = title + self.enabled = enabled + + def __enter__(self): + if self.enabled: + kwargs = {} + for attr in ('nbops', 'size', 'stream', 'title'): + value = getattr(self, attr) + if value is not _MARKER: + kwargs[attr] = value + self.pb = ProgressBar(**kwargs) + else: + self.pb = DummyProgressBar() + return self.pb + + def __exit__(self, exc_type, exc_val, exc_tb): + self.pb.finish() + +class RawInput(object): + + def __init__(self, input=None, printer=None): + self._input = input or raw_input + self._print = printer + + def ask(self, question, options, default): + assert default in options + choices = [] + for option in options: + if option == default: + label = option[0].upper() + else: + label = option[0].lower() + if len(option) > 1: + label += 
'(%s)' % option[1:].lower() + choices.append((option, label)) + prompt = "%s [%s]: " % (question, + '/'.join([opt[1] for opt in choices])) + tries = 3 + while tries > 0: + answer = self._input(prompt).strip().lower() + if not answer: + return default + possible = [option for option, label in choices + if option.lower().startswith(answer)] + if len(possible) == 1: + return possible[0] + elif len(possible) == 0: + msg = '%s is not an option.' % answer + else: + msg = ('%s is an ambiguous answer, do you mean %s ?' % ( + answer, ' or '.join(possible))) + if self._print: + self._print(msg) + else: + print(msg) + tries -= 1 + raise Exception('unable to get a sensible answer') + + def confirm(self, question, default_is_yes=True): + default = default_is_yes and 'y' or 'n' + answer = self.ask(question, ('y', 'n'), default) + return answer == 'y' + +ASK = RawInput() + + +def getlogin(): + """avoid using os.getlogin() because of strange tty / stdin problems + (man 3 getlogin) + Another solution would be to use $LOGNAME, $USER or $USERNAME + """ + if sys.platform != 'win32': + import pwd # Platforms: Unix + return pwd.getpwuid(os.getuid())[0] + else: + return os.environ['USERNAME'] + +def generate_password(length=8, vocab=string.ascii_letters + string.digits): + """dumb password generation function""" + pwd = '' + for i in range(length): + pwd += random.choice(vocab) + return pwd diff --git a/pymode/libs/logilab/common/sphinx_ext.py b/pymode/libs/logilab/common/sphinx_ext.py new file mode 100644 index 00000000..a24608ce --- /dev/null +++ b/pymode/libs/logilab/common/sphinx_ext.py @@ -0,0 +1,87 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. 
+# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +from logilab.common.decorators import monkeypatch + +from sphinx.ext import autodoc + +class DocstringOnlyModuleDocumenter(autodoc.ModuleDocumenter): + objtype = 'docstring' + def format_signature(self): + pass + def add_directive_header(self, sig): + pass + def document_members(self, all_members=False): + pass + + def resolve_name(self, modname, parents, path, base): + if modname is not None: + return modname, parents + [base] + return (path or '') + base, [] + + +#autodoc.add_documenter(DocstringOnlyModuleDocumenter) + +def setup(app): + app.add_autodocumenter(DocstringOnlyModuleDocumenter) + + + +from sphinx.ext.autodoc import (ViewList, Options, AutodocReporter, nodes, + assemble_option_dict, nested_parse_with_titles) + +@monkeypatch(autodoc.AutoDirective) +def run(self): + self.filename_set = set() # a set of dependent filenames + self.reporter = self.state.document.reporter + self.env = self.state.document.settings.env + self.warnings = [] + self.result = ViewList() + + # find out what documenter to call + objtype = self.name[4:] + doc_class = self._registry[objtype] + # process the options with the selected documenter's option_spec + self.genopt = Options(assemble_option_dict( + self.options.items(), doc_class.option_spec)) + # generate the output + documenter = doc_class(self, self.arguments[0]) + 
documenter.generate(more_content=self.content) + if not self.result: + return self.warnings + + # record all filenames as dependencies -- this will at least + # partially make automatic invalidation possible + for fn in self.filename_set: + self.env.note_dependency(fn) + + # use a custom reporter that correctly assigns lines to source + # filename/description and lineno + old_reporter = self.state.memo.reporter + self.state.memo.reporter = AutodocReporter(self.result, + self.state.memo.reporter) + if self.name in ('automodule', 'autodocstring'): + node = nodes.section() + # necessary so that the child nodes get the right source/line set + node.document = self.state.document + nested_parse_with_titles(self.state, self.result, node) + else: + node = nodes.paragraph() + node.document = self.state.document + self.state.nested_parse(self.result, 0, node) + self.state.memo.reporter = old_reporter + return self.warnings + node.children diff --git a/pymode/libs/logilab/common/sphinxutils.py b/pymode/libs/logilab/common/sphinxutils.py new file mode 100644 index 00000000..ab6e8a18 --- /dev/null +++ b/pymode/libs/logilab/common/sphinxutils.py @@ -0,0 +1,122 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . 
+"""Sphinx utils + +ModuleGenerator: Generate a file that lists all the modules of a list of +packages in order to pull all the docstring. +This should not be used in a makefile to systematically generate sphinx +documentation! + +Typical usage: + +>>> from logilab.common.sphinxutils import ModuleGenerator +>>> mgen = ModuleGenerator('logilab common', '/home/adim/src/logilab/common') +>>> mgen.generate('api_logilab_common.rst', exclude_dirs=('test',)) +""" + +import os, sys +import os.path as osp +import inspect + +from logilab.common import STD_BLACKLIST +from logilab.common.shellutils import globfind +from logilab.common.modutils import load_module_from_file, modpath_from_file + +def module_members(module): + members = [] + for name, value in inspect.getmembers(module): + if getattr(value, '__module__', None) == module.__name__: + members.append( (name, value) ) + return sorted(members) + + +def class_members(klass): + return sorted([name for name in vars(klass) + if name not in ('__doc__', '__module__', + '__dict__', '__weakref__')]) + +class ModuleGenerator: + file_header = """.. -*- coding: utf-8 -*-\n\n%s\n""" + module_def = """ +:mod:`%s` +=======%s + +.. automodule:: %s + :members: %s +""" + class_def = """ + +.. 
autoclass:: %s + :members: %s + +""" + + def __init__(self, project_title, code_dir): + self.title = project_title + self.code_dir = osp.abspath(code_dir) + + def generate(self, dest_file, exclude_dirs=STD_BLACKLIST): + """make the module file""" + self.fn = open(dest_file, 'w') + num = len(self.title) + 6 + title = "=" * num + "\n %s API\n" % self.title + "=" * num + self.fn.write(self.file_header % title) + self.gen_modules(exclude_dirs=exclude_dirs) + self.fn.close() + + def gen_modules(self, exclude_dirs): + """generate all modules""" + for module in self.find_modules(exclude_dirs): + modname = module.__name__ + classes = [] + modmembers = [] + for objname, obj in module_members(module): + if inspect.isclass(obj): + classmembers = class_members(obj) + classes.append( (objname, classmembers) ) + else: + modmembers.append(objname) + self.fn.write(self.module_def % (modname, '=' * len(modname), + modname, + ', '.join(modmembers))) + for klass, members in classes: + self.fn.write(self.class_def % (klass, ', '.join(members))) + + def find_modules(self, exclude_dirs): + basepath = osp.dirname(self.code_dir) + basedir = osp.basename(basepath) + osp.sep + if basedir not in sys.path: + sys.path.insert(1, basedir) + for filepath in globfind(self.code_dir, '*.py', exclude_dirs): + if osp.basename(filepath) in ('setup.py', '__pkginfo__.py'): + continue + try: + module = load_module_from_file(filepath) + except: # module might be broken or magic + dotted_path = modpath_from_file(filepath) + module = type('.'.join(dotted_path), (), {}) # mock it + yield module + + +if __name__ == '__main__': + # example : + title, code_dir, outfile = sys.argv[1:] + generator = ModuleGenerator(title, code_dir) + # XXX modnames = ['logilab'] + generator.generate(outfile, ('test', 'tests', 'examples', + 'data', 'doc', '.hg', 'migration')) diff --git a/pymode/libs/logilab/common/table.py b/pymode/libs/logilab/common/table.py new file mode 100644 index 00000000..2f3df694 --- /dev/null +++ 
b/pymode/libs/logilab/common/table.py @@ -0,0 +1,929 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Table management module.""" + +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from six.moves import range + +class Table(object): + """Table defines a data table with column and row names. 
+ inv: + len(self.data) <= len(self.row_names) + forall(self.data, lambda x: len(x) <= len(self.col_names)) + """ + + def __init__(self, default_value=0, col_names=None, row_names=None): + self.col_names = [] + self.row_names = [] + self.data = [] + self.default_value = default_value + if col_names: + self.create_columns(col_names) + if row_names: + self.create_rows(row_names) + + def _next_row_name(self): + return 'row%s' % (len(self.row_names)+1) + + def __iter__(self): + return iter(self.data) + + def __eq__(self, other): + if other is None: + return False + else: + return list(self) == list(other) + + __hash__ = object.__hash__ + + def __ne__(self, other): + return not self == other + + def __len__(self): + return len(self.row_names) + + ## Rows / Columns creation ################################################# + def create_rows(self, row_names): + """Appends row_names to the list of existing rows + """ + self.row_names.extend(row_names) + for row_name in row_names: + self.data.append([self.default_value]*len(self.col_names)) + + def create_columns(self, col_names): + """Appends col_names to the list of existing columns + """ + for col_name in col_names: + self.create_column(col_name) + + def create_row(self, row_name=None): + """Creates a rowname to the row_names list + """ + row_name = row_name or self._next_row_name() + self.row_names.append(row_name) + self.data.append([self.default_value]*len(self.col_names)) + + + def create_column(self, col_name): + """Creates a colname to the col_names list + """ + self.col_names.append(col_name) + for row in self.data: + row.append(self.default_value) + + ## Sort by column ########################################################## + def sort_by_column_id(self, col_id, method = 'asc'): + """Sorts the table (in-place) according to data stored in col_id + """ + try: + col_index = self.col_names.index(col_id) + self.sort_by_column_index(col_index, method) + except ValueError: + raise KeyError("Col (%s) not found in 
table" % (col_id)) + + + def sort_by_column_index(self, col_index, method = 'asc'): + """Sorts the table 'in-place' according to data stored in col_index + + method should be in ('asc', 'desc') + """ + sort_list = sorted([(row[col_index], row, row_name) + for row, row_name in zip(self.data, self.row_names)]) + # Sorting sort_list will sort according to col_index + # If we want reverse sort, then reverse list + if method.lower() == 'desc': + sort_list.reverse() + + # Rebuild data / row names + self.data = [] + self.row_names = [] + for val, row, row_name in sort_list: + self.data.append(row) + self.row_names.append(row_name) + + def groupby(self, colname, *others): + """builds indexes of data + :returns: nested dictionaries pointing to actual rows + """ + groups = {} + colnames = (colname,) + others + col_indexes = [self.col_names.index(col_id) for col_id in colnames] + for row in self.data: + ptr = groups + for col_index in col_indexes[:-1]: + ptr = ptr.setdefault(row[col_index], {}) + ptr = ptr.setdefault(row[col_indexes[-1]], + Table(default_value=self.default_value, + col_names=self.col_names)) + ptr.append_row(tuple(row)) + return groups + + def select(self, colname, value): + grouped = self.groupby(colname) + try: + return grouped[value] + except KeyError: + return [] + + def remove(self, colname, value): + col_index = self.col_names.index(colname) + for row in self.data[:]: + if row[col_index] == value: + self.data.remove(row) + + + ## The 'setter' part ####################################################### + def set_cell(self, row_index, col_index, data): + """sets value of cell 'row_indew', 'col_index' to data + """ + self.data[row_index][col_index] = data + + + def set_cell_by_ids(self, row_id, col_id, data): + """sets value of cell mapped by row_id and col_id to data + Raises a KeyError if row_id or col_id are not found in the table + """ + try: + row_index = self.row_names.index(row_id) + except ValueError: + raise KeyError("Row (%s) not found in table" 
% (row_id)) + else: + try: + col_index = self.col_names.index(col_id) + self.data[row_index][col_index] = data + except ValueError: + raise KeyError("Column (%s) not found in table" % (col_id)) + + + def set_row(self, row_index, row_data): + """sets the 'row_index' row + pre: + type(row_data) == types.ListType + len(row_data) == len(self.col_names) + """ + self.data[row_index] = row_data + + + def set_row_by_id(self, row_id, row_data): + """sets the 'row_id' column + pre: + type(row_data) == types.ListType + len(row_data) == len(self.row_names) + Raises a KeyError if row_id is not found + """ + try: + row_index = self.row_names.index(row_id) + self.set_row(row_index, row_data) + except ValueError: + raise KeyError('Row (%s) not found in table' % (row_id)) + + + def append_row(self, row_data, row_name=None): + """Appends a row to the table + pre: + type(row_data) == types.ListType + len(row_data) == len(self.col_names) + """ + row_name = row_name or self._next_row_name() + self.row_names.append(row_name) + self.data.append(row_data) + return len(self.data) - 1 + + def insert_row(self, index, row_data, row_name=None): + """Appends row_data before 'index' in the table. To make 'insert' + behave like 'list.insert', inserting in an out of range index will + insert row_data to the end of the list + pre: + type(row_data) == types.ListType + len(row_data) == len(self.col_names) + """ + row_name = row_name or self._next_row_name() + self.row_names.insert(index, row_name) + self.data.insert(index, row_data) + + + def delete_row(self, index): + """Deletes the 'index' row in the table, and returns it. + Raises an IndexError if index is out of range + """ + self.row_names.pop(index) + return self.data.pop(index) + + + def delete_row_by_id(self, row_id): + """Deletes the 'row_id' row in the table. + Raises a KeyError if row_id was not found. 
+ """ + try: + row_index = self.row_names.index(row_id) + self.delete_row(row_index) + except ValueError: + raise KeyError('Row (%s) not found in table' % (row_id)) + + + def set_column(self, col_index, col_data): + """sets the 'col_index' column + pre: + type(col_data) == types.ListType + len(col_data) == len(self.row_names) + """ + + for row_index, cell_data in enumerate(col_data): + self.data[row_index][col_index] = cell_data + + + def set_column_by_id(self, col_id, col_data): + """sets the 'col_id' column + pre: + type(col_data) == types.ListType + len(col_data) == len(self.col_names) + Raises a KeyError if col_id is not found + """ + try: + col_index = self.col_names.index(col_id) + self.set_column(col_index, col_data) + except ValueError: + raise KeyError('Column (%s) not found in table' % (col_id)) + + + def append_column(self, col_data, col_name): + """Appends the 'col_index' column + pre: + type(col_data) == types.ListType + len(col_data) == len(self.row_names) + """ + self.col_names.append(col_name) + for row_index, cell_data in enumerate(col_data): + self.data[row_index].append(cell_data) + + + def insert_column(self, index, col_data, col_name): + """Appends col_data before 'index' in the table. To make 'insert' + behave like 'list.insert', inserting in an out of range index will + insert col_data to the end of the list + pre: + type(col_data) == types.ListType + len(col_data) == len(self.row_names) + """ + self.col_names.insert(index, col_name) + for row_index, cell_data in enumerate(col_data): + self.data[row_index].insert(index, cell_data) + + + def delete_column(self, index): + """Deletes the 'index' column in the table, and returns it. + Raises an IndexError if index is out of range + """ + self.col_names.pop(index) + return [row.pop(index) for row in self.data] + + + def delete_column_by_id(self, col_id): + """Deletes the 'col_id' col in the table. + Raises a KeyError if col_id was not found. 
+ """ + try: + col_index = self.col_names.index(col_id) + self.delete_column(col_index) + except ValueError: + raise KeyError('Column (%s) not found in table' % (col_id)) + + + ## The 'getter' part ####################################################### + + def get_shape(self): + """Returns a tuple which represents the table's shape + """ + return len(self.row_names), len(self.col_names) + shape = property(get_shape) + + def __getitem__(self, indices): + """provided for convenience""" + rows, multirows = None, False + cols, multicols = None, False + if isinstance(indices, tuple): + rows = indices[0] + if len(indices) > 1: + cols = indices[1] + else: + rows = indices + # define row slice + if isinstance(rows, str): + try: + rows = self.row_names.index(rows) + except ValueError: + raise KeyError("Row (%s) not found in table" % (rows)) + if isinstance(rows, int): + rows = slice(rows, rows+1) + multirows = False + else: + rows = slice(None) + multirows = True + # define col slice + if isinstance(cols, str): + try: + cols = self.col_names.index(cols) + except ValueError: + raise KeyError("Column (%s) not found in table" % (cols)) + if isinstance(cols, int): + cols = slice(cols, cols+1) + multicols = False + else: + cols = slice(None) + multicols = True + # get sub-table + tab = Table() + tab.default_value = self.default_value + tab.create_rows(self.row_names[rows]) + tab.create_columns(self.col_names[cols]) + for idx, row in enumerate(self.data[rows]): + tab.set_row(idx, row[cols]) + if multirows : + if multicols: + return tab + else: + return [item[0] for item in tab.data] + else: + if multicols: + return tab.data[0] + else: + return tab.data[0][0] + + def get_cell_by_ids(self, row_id, col_id): + """Returns the element at [row_id][col_id] + """ + try: + row_index = self.row_names.index(row_id) + except ValueError: + raise KeyError("Row (%s) not found in table" % (row_id)) + else: + try: + col_index = self.col_names.index(col_id) + except ValueError: + raise 
KeyError("Column (%s) not found in table" % (col_id)) + return self.data[row_index][col_index] + + def get_row_by_id(self, row_id): + """Returns the 'row_id' row + """ + try: + row_index = self.row_names.index(row_id) + except ValueError: + raise KeyError("Row (%s) not found in table" % (row_id)) + return self.data[row_index] + + def get_column_by_id(self, col_id, distinct=False): + """Returns the 'col_id' col + """ + try: + col_index = self.col_names.index(col_id) + except ValueError: + raise KeyError("Column (%s) not found in table" % (col_id)) + return self.get_column(col_index, distinct) + + def get_columns(self): + """Returns all the columns in the table + """ + return [self[:, index] for index in range(len(self.col_names))] + + def get_column(self, col_index, distinct=False): + """get a column by index""" + col = [row[col_index] for row in self.data] + if distinct: + col = list(set(col)) + return col + + def apply_stylesheet(self, stylesheet): + """Applies the stylesheet to this table + """ + for instruction in stylesheet.instructions: + eval(instruction) + + + def transpose(self): + """Keeps the self object intact, and returns the transposed (rotated) + table. + """ + transposed = Table() + transposed.create_rows(self.col_names) + transposed.create_columns(self.row_names) + for col_index, column in enumerate(self.get_columns()): + transposed.set_row(col_index, column) + return transposed + + + def pprint(self): + """returns a string representing the table in a pretty + printed 'text' format. 
+ """ + # The maximum row name (to know the start_index of the first col) + max_row_name = 0 + for row_name in self.row_names: + if len(row_name) > max_row_name: + max_row_name = len(row_name) + col_start = max_row_name + 5 + + lines = [] + # Build the 'first' line <=> the col_names one + # The first cell <=> an empty one + col_names_line = [' '*col_start] + for col_name in self.col_names: + col_names_line.append(col_name + ' '*5) + lines.append('|' + '|'.join(col_names_line) + '|') + max_line_length = len(lines[0]) + + # Build the table + for row_index, row in enumerate(self.data): + line = [] + # First, build the row_name's cell + row_name = self.row_names[row_index] + line.append(row_name + ' '*(col_start-len(row_name))) + + # Then, build all the table's cell for this line. + for col_index, cell in enumerate(row): + col_name_length = len(self.col_names[col_index]) + 5 + data = str(cell) + line.append(data + ' '*(col_name_length - len(data))) + lines.append('|' + '|'.join(line) + '|') + if len(lines[-1]) > max_line_length: + max_line_length = len(lines[-1]) + + # Wrap the table with '-' to make a frame + lines.insert(0, '-'*max_line_length) + lines.append('-'*max_line_length) + return '\n'.join(lines) + + + def __repr__(self): + return repr(self.data) + + def as_text(self): + data = [] + # We must convert cells into strings before joining them + for row in self.data: + data.append([str(cell) for cell in row]) + lines = ['\t'.join(row) for row in data] + return '\n'.join(lines) + + + +class TableStyle: + """Defines a table's style + """ + + def __init__(self, table): + + self._table = table + self.size = dict([(col_name, '1*') for col_name in table.col_names]) + # __row_column__ is a special key to define the first column which + # actually has no name (<=> left most column <=> row names column) + self.size['__row_column__'] = '1*' + self.alignment = dict([(col_name, 'right') + for col_name in table.col_names]) + self.alignment['__row_column__'] = 'right' + + # We 
shouldn't have to create an entry for + # the 1st col (the row_column one) + self.units = dict([(col_name, '') for col_name in table.col_names]) + self.units['__row_column__'] = '' + + # XXX FIXME : params order should be reversed for all set() methods + def set_size(self, value, col_id): + """sets the size of the specified col_id to value + """ + self.size[col_id] = value + + def set_size_by_index(self, value, col_index): + """Allows to set the size according to the column index rather than + using the column's id. + BE CAREFUL : the '0' column is the '__row_column__' one ! + """ + if col_index == 0: + col_id = '__row_column__' + else: + col_id = self._table.col_names[col_index-1] + + self.size[col_id] = value + + + def set_alignment(self, value, col_id): + """sets the alignment of the specified col_id to value + """ + self.alignment[col_id] = value + + + def set_alignment_by_index(self, value, col_index): + """Allows to set the alignment according to the column index rather than + using the column's id. + BE CAREFUL : the '0' column is the '__row_column__' one ! + """ + if col_index == 0: + col_id = '__row_column__' + else: + col_id = self._table.col_names[col_index-1] + + self.alignment[col_id] = value + + + def set_unit(self, value, col_id): + """sets the unit of the specified col_id to value + """ + self.units[col_id] = value + + + def set_unit_by_index(self, value, col_index): + """Allows to set the unit according to the column index rather than + using the column's id. + BE CAREFUL : the '0' column is the '__row_column__' one ! 
+ (Note that in the 'unit' case, you shouldn't have to set a unit + for the 1st column (the __row__column__ one)) + """ + if col_index == 0: + col_id = '__row_column__' + else: + col_id = self._table.col_names[col_index-1] + + self.units[col_id] = value + + + def get_size(self, col_id): + """Returns the size of the specified col_id + """ + return self.size[col_id] + + + def get_size_by_index(self, col_index): + """Allows to get the size according to the column index rather than + using the column's id. + BE CAREFUL : the '0' column is the '__row_column__' one ! + """ + if col_index == 0: + col_id = '__row_column__' + else: + col_id = self._table.col_names[col_index-1] + + return self.size[col_id] + + + def get_alignment(self, col_id): + """Returns the alignment of the specified col_id + """ + return self.alignment[col_id] + + + def get_alignment_by_index(self, col_index): + """Allors to get the alignment according to the column index rather than + using the column's id. + BE CAREFUL : the '0' column is the '__row_column__' one ! + """ + if col_index == 0: + col_id = '__row_column__' + else: + col_id = self._table.col_names[col_index-1] + + return self.alignment[col_id] + + + def get_unit(self, col_id): + """Returns the unit of the specified col_id + """ + return self.units[col_id] + + + def get_unit_by_index(self, col_index): + """Allors to get the unit according to the column index rather than + using the column's id. + BE CAREFUL : the '0' column is the '__row_column__' one ! + """ + if col_index == 0: + col_id = '__row_column__' + else: + col_id = self._table.col_names[col_index-1] + + return self.units[col_id] + + +import re +CELL_PROG = re.compile("([0-9]+)_([0-9]+)") + +class TableStyleSheet: + """A simple Table stylesheet + Rules are expressions where cells are defined by the row_index + and col_index separated by an underscore ('_'). 
+ For example, suppose you want to say that the (2,5) cell must be + the sum of its two preceding cells in the row, you would create + the following rule : + 2_5 = 2_3 + 2_4 + You can also use all the math.* operations you want. For example: + 2_5 = sqrt(2_3**2 + 2_4**2) + """ + + def __init__(self, rules = None): + rules = rules or [] + self.rules = [] + self.instructions = [] + for rule in rules: + self.add_rule(rule) + + + def add_rule(self, rule): + """Adds a rule to the stylesheet rules + """ + try: + source_code = ['from math import *'] + source_code.append(CELL_PROG.sub(r'self.data[\1][\2]', rule)) + self.instructions.append(compile('\n'.join(source_code), + 'table.py', 'exec')) + self.rules.append(rule) + except SyntaxError: + print("Bad Stylesheet Rule : %s [skipped]" % rule) + + + def add_rowsum_rule(self, dest_cell, row_index, start_col, end_col): + """Creates and adds a rule to sum over the row at row_index from + start_col to end_col. + dest_cell is a tuple of two elements (x,y) of the destination cell + No check is done for indexes ranges. + pre: + start_col >= 0 + end_col > start_col + """ + cell_list = ['%d_%d'%(row_index, index) for index in range(start_col, + end_col + 1)] + rule = '%d_%d=' % dest_cell + '+'.join(cell_list) + self.add_rule(rule) + + + def add_rowavg_rule(self, dest_cell, row_index, start_col, end_col): + """Creates and adds a rule to make the row average (from start_col + to end_col) + dest_cell is a tuple of two elements (x,y) of the destination cell + No check is done for indexes ranges. + pre: + start_col >= 0 + end_col > start_col + """ + cell_list = ['%d_%d'%(row_index, index) for index in range(start_col, + end_col + 1)] + num = (end_col - start_col + 1) + rule = '%d_%d=' % dest_cell + '('+'+'.join(cell_list)+')/%f'%num + self.add_rule(rule) + + + def add_colsum_rule(self, dest_cell, col_index, start_row, end_row): + """Creates and adds a rule to sum over the col at col_index from + start_row to end_row. 
+ dest_cell is a tuple of two elements (x,y) of the destination cell + No check is done for indexes ranges. + pre: + start_row >= 0 + end_row > start_row + """ + cell_list = ['%d_%d'%(index, col_index) for index in range(start_row, + end_row + 1)] + rule = '%d_%d=' % dest_cell + '+'.join(cell_list) + self.add_rule(rule) + + + def add_colavg_rule(self, dest_cell, col_index, start_row, end_row): + """Creates and adds a rule to make the col average (from start_row + to end_row) + dest_cell is a tuple of two elements (x,y) of the destination cell + No check is done for indexes ranges. + pre: + start_row >= 0 + end_row > start_row + """ + cell_list = ['%d_%d'%(index, col_index) for index in range(start_row, + end_row + 1)] + num = (end_row - start_row + 1) + rule = '%d_%d=' % dest_cell + '('+'+'.join(cell_list)+')/%f'%num + self.add_rule(rule) + + + +class TableCellRenderer: + """Defines a simple text renderer + """ + + def __init__(self, **properties): + """keywords should be properties with an associated boolean as value. + For example : + renderer = TableCellRenderer(units = True, alignment = False) + An unspecified property will have a 'False' value by default. 
+ Possible properties are : + alignment, unit + """ + self.properties = properties + + + def render_cell(self, cell_coord, table, table_style): + """Renders the cell at 'cell_coord' in the table, using table_style + """ + row_index, col_index = cell_coord + cell_value = table.data[row_index][col_index] + final_content = self._make_cell_content(cell_value, + table_style, col_index +1) + return self._render_cell_content(final_content, + table_style, col_index + 1) + + + def render_row_cell(self, row_name, table, table_style): + """Renders the cell for 'row_id' row + """ + cell_value = row_name + return self._render_cell_content(cell_value, table_style, 0) + + + def render_col_cell(self, col_name, table, table_style): + """Renders the cell for 'col_id' column + """ + cell_value = col_name + col_index = table.col_names.index(col_name) + return self._render_cell_content(cell_value, table_style, col_index +1) + + + + def _render_cell_content(self, content, table_style, col_index): + """Makes the appropriate rendering for this cell content. 
+ Rendering properties will be searched using the + *table_style.get_xxx_by_index(col_index)' methods + + **This method should be overridden in the derived renderer classes.** + """ + return content + + + def _make_cell_content(self, cell_content, table_style, col_index): + """Makes the cell content (adds decoration data, like units for + example) + """ + final_content = cell_content + if 'skip_zero' in self.properties: + replacement_char = self.properties['skip_zero'] + else: + replacement_char = 0 + if replacement_char and final_content == 0: + return replacement_char + + try: + units_on = self.properties['units'] + if units_on: + final_content = self._add_unit( + cell_content, table_style, col_index) + except KeyError: + pass + + return final_content + + + def _add_unit(self, cell_content, table_style, col_index): + """Adds unit to the cell_content if needed + """ + unit = table_style.get_unit_by_index(col_index) + return str(cell_content) + " " + unit + + + +class DocbookRenderer(TableCellRenderer): + """Defines how to render a cell for a docbook table + """ + + def define_col_header(self, col_index, table_style): + """Computes the colspec element according to the style + """ + size = table_style.get_size_by_index(col_index) + return '\n' % \ + (col_index, size) + + + def _render_cell_content(self, cell_content, table_style, col_index): + """Makes the appropriate rendering for this cell content. + Rendering properties will be searched using the + table_style.get_xxx_by_index(col_index)' methods. 
+ """ + try: + align_on = self.properties['alignment'] + alignment = table_style.get_alignment_by_index(col_index) + if align_on: + return "%s\n" % \ + (alignment, cell_content) + except KeyError: + # KeyError <=> Default alignment + return "%s\n" % cell_content + + +class TableWriter: + """A class to write tables + """ + + def __init__(self, stream, table, style, **properties): + self._stream = stream + self.style = style or TableStyle(table) + self._table = table + self.properties = properties + self.renderer = None + + + def set_style(self, style): + """sets the table's associated style + """ + self.style = style + + + def set_renderer(self, renderer): + """sets the way to render cell + """ + self.renderer = renderer + + + def update_properties(self, **properties): + """Updates writer's properties (for cell rendering) + """ + self.properties.update(properties) + + + def write_table(self, title = ""): + """Writes the table + """ + raise NotImplementedError("write_table must be implemented !") + + + +class DocbookTableWriter(TableWriter): + """Defines an implementation of TableWriter to write a table in Docbook + """ + + def _write_headers(self): + """Writes col headers + """ + # Define col_headers (colstpec elements) + for col_index in range(len(self._table.col_names)+1): + self._stream.write(self.renderer.define_col_header(col_index, + self.style)) + + self._stream.write("\n\n") + # XXX FIXME : write an empty entry <=> the first (__row_column) column + self._stream.write('\n') + for col_name in self._table.col_names: + self._stream.write(self.renderer.render_col_cell( + col_name, self._table, + self.style)) + + self._stream.write("\n\n") + + + def _write_body(self): + """Writes the table body + """ + self._stream.write('\n') + + for row_index, row in enumerate(self._table.data): + self._stream.write('\n') + row_name = self._table.row_names[row_index] + # Write the first entry (row_name) + self._stream.write(self.renderer.render_row_cell(row_name, + self._table, 
+ self.style)) + + for col_index, cell in enumerate(row): + self._stream.write(self.renderer.render_cell( + (row_index, col_index), + self._table, self.style)) + + self._stream.write('\n') + + self._stream.write('\n') + + + def write_table(self, title = ""): + """Writes the table + """ + self._stream.write('\n%s>\n'%(title)) + self._stream.write( + '\n'% + (len(self._table.col_names)+1)) + self._write_headers() + self._write_body() + + self._stream.write('\n
\n') + + diff --git a/pymode/libs/logilab/common/tasksqueue.py b/pymode/libs/logilab/common/tasksqueue.py new file mode 100644 index 00000000..ed74cf5a --- /dev/null +++ b/pymode/libs/logilab/common/tasksqueue.py @@ -0,0 +1,101 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . 
+"""Prioritized tasks queue""" + +__docformat__ = "restructuredtext en" + +from bisect import insort_left + +from six.moves import queue + +LOW = 0 +MEDIUM = 10 +HIGH = 100 + +PRIORITY = { + 'LOW': LOW, + 'MEDIUM': MEDIUM, + 'HIGH': HIGH, + } +REVERSE_PRIORITY = dict((values, key) for key, values in PRIORITY.items()) + + + +class PrioritizedTasksQueue(queue.Queue): + + def _init(self, maxsize): + """Initialize the queue representation""" + self.maxsize = maxsize + # ordered list of task, from the lowest to the highest priority + self.queue = [] + + def _put(self, item): + """Put a new item in the queue""" + for i, task in enumerate(self.queue): + # equivalent task + if task == item: + # if new task has a higher priority, remove the one already + # queued so the new priority will be considered + if task < item: + item.merge(task) + del self.queue[i] + break + # else keep it so current order is kept + task.merge(item) + return + insort_left(self.queue, item) + + def _get(self): + """Get an item from the queue""" + return self.queue.pop() + + def __iter__(self): + return iter(self.queue) + + def remove(self, tid): + """remove a specific task from the queue""" + # XXX acquire lock + for i, task in enumerate(self): + if task.id == tid: + self.queue.pop(i) + return + raise ValueError('not task of id %s in queue' % tid) + +class Task(object): + def __init__(self, tid, priority=LOW): + # task id + self.id = tid + # task priority + self.priority = priority + + def __repr__(self): + return '' % (self.id, id(self)) + + def __cmp__(self, other): + return cmp(self.priority, other.priority) + + def __lt__(self, other): + return self.priority < other.priority + + def __eq__(self, other): + return self.id == other.id + + __hash__ = object.__hash__ + + def merge(self, other): + pass diff --git a/pymode/libs/logilab/common/testlib.py b/pymode/libs/logilab/common/testlib.py new file mode 100644 index 00000000..a6b4b1e1 --- /dev/null +++ b/pymode/libs/logilab/common/testlib.py @@ -0,0 
+1,1338 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Run tests. + +This will find all modules whose name match a given prefix in the test +directory, and run them. Various command line options provide +additional facilities. + +Command line options: + + -v verbose -- run tests in verbose mode with output to stdout + -q quiet -- don't print anything except if a test fails + -t testdir -- directory where the tests will be found + -x exclude -- add a test to exclude + -p profile -- profiled execution + -d dbc -- enable design-by-contract + -m match -- only run test matching the tag pattern which follow + +If no non-option arguments are present, prefixes used are 'test', +'regrtest', 'smoketest' and 'unittest'. 
+ +""" + +from __future__ import print_function + +__docformat__ = "restructuredtext en" +# modified copy of some functions from test/regrtest.py from PyXml +# disable camel case warning +# pylint: disable=C0103 + +import sys +import os, os.path as osp +import re +import traceback +import inspect +import difflib +import tempfile +import math +import warnings +from shutil import rmtree +from operator import itemgetter +from itertools import dropwhile +from inspect import isgeneratorfunction + +from six import string_types +from six.moves import builtins, range, configparser, input + +from logilab.common.deprecation import deprecated + +import unittest as unittest_legacy +if not getattr(unittest_legacy, "__package__", None): + try: + import unittest2 as unittest + from unittest2 import SkipTest + except ImportError: + raise ImportError("You have to install python-unittest2 to use %s" % __name__) +else: + import unittest + from unittest import SkipTest + +from functools import wraps + +from logilab.common.debugger import Debugger, colorize_source +from logilab.common.decorators import cached, classproperty +from logilab.common import textutils + + +__all__ = ['main', 'unittest_main', 'find_tests', 'run_test', 'spawn'] + +DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest', + 'func', 'validation') + +is_generator = deprecated('[lgc 0.63] use inspect.isgeneratorfunction')(isgeneratorfunction) + +# used by unittest to count the number of relevant levels in the traceback +__unittest = 1 + + +def with_tempdir(callable): + """A decorator ensuring no temporary file left when the function return + Work only for temporary file created with the tempfile module""" + if isgeneratorfunction(callable): + def proxy(*args, **kwargs): + old_tmpdir = tempfile.gettempdir() + new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-") + tempfile.tempdir = new_tmpdir + try: + for x in callable(*args, **kwargs): + yield x + finally: + try: + rmtree(new_tmpdir, ignore_errors=True) + 
finally: + tempfile.tempdir = old_tmpdir + return proxy + + @wraps(callable) + def proxy(*args, **kargs): + + old_tmpdir = tempfile.gettempdir() + new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-") + tempfile.tempdir = new_tmpdir + try: + return callable(*args, **kargs) + finally: + try: + rmtree(new_tmpdir, ignore_errors=True) + finally: + tempfile.tempdir = old_tmpdir + return proxy + +def in_tempdir(callable): + """A decorator moving the enclosed function inside the tempfile.tempfdir + """ + @wraps(callable) + def proxy(*args, **kargs): + + old_cwd = os.getcwd() + os.chdir(tempfile.tempdir) + try: + return callable(*args, **kargs) + finally: + os.chdir(old_cwd) + return proxy + +def within_tempdir(callable): + """A decorator run the enclosed function inside a tmpdir removed after execution + """ + proxy = with_tempdir(in_tempdir(callable)) + proxy.__name__ = callable.__name__ + return proxy + +def find_tests(testdir, + prefixes=DEFAULT_PREFIXES, suffix=".py", + excludes=(), + remove_suffix=True): + """ + Return a list of all applicable test modules. 
+ """ + tests = [] + for name in os.listdir(testdir): + if not suffix or name.endswith(suffix): + for prefix in prefixes: + if name.startswith(prefix): + if remove_suffix and name.endswith(suffix): + name = name[:-len(suffix)] + if name not in excludes: + tests.append(name) + tests.sort() + return tests + + +## PostMortem Debug facilities ##### +def start_interactive_mode(result): + """starts an interactive shell so that the user can inspect errors + """ + debuggers = result.debuggers + descrs = result.error_descrs + result.fail_descrs + if len(debuggers) == 1: + # don't ask for test name if there's only one failure + debuggers[0].start() + else: + while True: + testindex = 0 + print("Choose a test to debug:") + # order debuggers in the same way than errors were printed + print("\n".join(['\t%s : %s' % (i, descr) for i, (_, descr) + in enumerate(descrs)])) + print("Type 'exit' (or ^D) to quit") + print() + try: + todebug = input('Enter a test name: ') + if todebug.strip().lower() == 'exit': + print() + break + else: + try: + testindex = int(todebug) + debugger = debuggers[descrs[testindex][0]] + except (ValueError, IndexError): + print("ERROR: invalid test number %r" % (todebug, )) + else: + debugger.start() + except (EOFError, KeyboardInterrupt): + print() + break + + +# test utils ################################################################## + +class SkipAwareTestResult(unittest._TextTestResult): + + def __init__(self, stream, descriptions, verbosity, + exitfirst=False, pdbmode=False, cvg=None, colorize=False): + super(SkipAwareTestResult, self).__init__(stream, + descriptions, verbosity) + self.skipped = [] + self.debuggers = [] + self.fail_descrs = [] + self.error_descrs = [] + self.exitfirst = exitfirst + self.pdbmode = pdbmode + self.cvg = cvg + self.colorize = colorize + self.pdbclass = Debugger + self.verbose = verbosity > 1 + + def descrs_for(self, flavour): + return getattr(self, '%s_descrs' % flavour.lower()) + + def _create_pdb(self, test_descr, 
flavour): + self.descrs_for(flavour).append( (len(self.debuggers), test_descr) ) + if self.pdbmode: + self.debuggers.append(self.pdbclass(sys.exc_info()[2])) + + def _iter_valid_frames(self, frames): + """only consider non-testlib frames when formatting traceback""" + lgc_testlib = osp.abspath(__file__) + std_testlib = osp.abspath(unittest.__file__) + invalid = lambda fi: osp.abspath(fi[1]) in (lgc_testlib, std_testlib) + for frameinfo in dropwhile(invalid, frames): + yield frameinfo + + def _exc_info_to_string(self, err, test): + """Converts a sys.exc_info()-style tuple of values into a string. + + This method is overridden here because we want to colorize + lines if --color is passed, and display local variables if + --verbose is passed + """ + exctype, exc, tb = err + output = ['Traceback (most recent call last)'] + frames = inspect.getinnerframes(tb) + colorize = self.colorize + frames = enumerate(self._iter_valid_frames(frames)) + for index, (frame, filename, lineno, funcname, ctx, ctxindex) in frames: + filename = osp.abspath(filename) + if ctx is None: # pyc files or C extensions for instance + source = '' + else: + source = ''.join(ctx) + if colorize: + filename = textutils.colorize_ansi(filename, 'magenta') + source = colorize_source(source) + output.append(' File "%s", line %s, in %s' % (filename, lineno, funcname)) + output.append(' %s' % source.strip()) + if self.verbose: + output.append('%r == %r' % (dir(frame), test.__module__)) + output.append('') + output.append(' ' + ' local variables '.center(66, '-')) + for varname, value in sorted(frame.f_locals.items()): + output.append(' %s: %r' % (varname, value)) + if varname == 'self': # special handy processing for self + for varname, value in sorted(vars(value).items()): + output.append(' self.%s: %r' % (varname, value)) + output.append(' ' + '-' * 66) + output.append('') + output.append(''.join(traceback.format_exception_only(exctype, exc))) + return '\n'.join(output) + + def addError(self, test, err): + 
"""err -> (exc_type, exc, tcbk)""" + exc_type, exc, _ = err + if isinstance(exc, SkipTest): + assert exc_type == SkipTest + self.addSkip(test, exc) + else: + if self.exitfirst: + self.shouldStop = True + descr = self.getDescription(test) + super(SkipAwareTestResult, self).addError(test, err) + self._create_pdb(descr, 'error') + + def addFailure(self, test, err): + if self.exitfirst: + self.shouldStop = True + descr = self.getDescription(test) + super(SkipAwareTestResult, self).addFailure(test, err) + self._create_pdb(descr, 'fail') + + def addSkip(self, test, reason): + self.skipped.append((test, reason)) + if self.showAll: + self.stream.writeln("SKIPPED") + elif self.dots: + self.stream.write('S') + + def printErrors(self): + super(SkipAwareTestResult, self).printErrors() + self.printSkippedList() + + def printSkippedList(self): + # format (test, err) compatible with unittest2 + for test, err in self.skipped: + descr = self.getDescription(test) + self.stream.writeln(self.separator1) + self.stream.writeln("%s: %s" % ('SKIPPED', descr)) + self.stream.writeln("\t%s" % err) + + def printErrorList(self, flavour, errors): + for (_, descr), (test, err) in zip(self.descrs_for(flavour), errors): + self.stream.writeln(self.separator1) + self.stream.writeln("%s: %s" % (flavour, descr)) + self.stream.writeln(self.separator2) + self.stream.writeln(err) + self.stream.writeln('no stdout'.center(len(self.separator2))) + self.stream.writeln('no stderr'.center(len(self.separator2))) + +# Add deprecation warnings about new api used by module level fixtures in unittest2 +# http://www.voidspace.org.uk/python/articles/unittest2.shtml#setupmodule-and-teardownmodule +class _DebugResult(object): # simplify import statement among unittest flavors.. + "Used by the TestSuite to hold previous class when running in debug." 
+ _previousTestClass = None + _moduleSetUpFailed = False + shouldStop = False + +# backward compatibility: TestSuite might be imported from lgc.testlib +TestSuite = unittest.TestSuite + +class keywords(dict): + """Keyword args (**kwargs) support for generative tests.""" + +class starargs(tuple): + """Variable arguments (*args) for generative tests.""" + def __new__(cls, *args): + return tuple.__new__(cls, args) + +unittest_main = unittest.main + + +class InnerTestSkipped(SkipTest): + """raised when a test is skipped""" + pass + +def parse_generative_args(params): + args = [] + varargs = () + kwargs = {} + flags = 0 # 2 <=> starargs, 4 <=> kwargs + for param in params: + if isinstance(param, starargs): + varargs = param + if flags: + raise TypeError('found starargs after keywords !') + flags |= 2 + args += list(varargs) + elif isinstance(param, keywords): + kwargs = param + if flags & 4: + raise TypeError('got multiple keywords parameters') + flags |= 4 + elif flags & 2 or flags & 4: + raise TypeError('found parameters after kwargs or args') + else: + args.append(param) + + return args, kwargs + + +class InnerTest(tuple): + def __new__(cls, name, *data): + instance = tuple.__new__(cls, data) + instance.name = name + return instance + +class Tags(set): + """A set of tag able validate an expression""" + + def __init__(self, *tags, **kwargs): + self.inherit = kwargs.pop('inherit', True) + if kwargs: + raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys()) + + if len(tags) == 1 and not isinstance(tags[0], string_types): + tags = tags[0] + super(Tags, self).__init__(tags, **kwargs) + + def __getitem__(self, key): + return key in self + + def match(self, exp): + return eval(exp, {}, self) + + def __or__(self, other): + return Tags(*super(Tags, self).__or__(other)) + + +# duplicate definition from unittest2 of the _deprecate decorator +def _deprecate(original_func): + def deprecated_func(*args, **kwargs): + warnings.warn( + ('Please use %s 
instead.' % original_func.__name__), + DeprecationWarning, 2) + return original_func(*args, **kwargs) + return deprecated_func + +class TestCase(unittest.TestCase): + """A unittest.TestCase extension with some additional methods.""" + maxDiff = None + pdbclass = Debugger + tags = Tags() + + def __init__(self, methodName='runTest'): + super(TestCase, self).__init__(methodName) + self.__exc_info = sys.exc_info + self.__testMethodName = self._testMethodName + self._current_test_descr = None + self._options_ = None + + @classproperty + @cached + def datadir(cls): # pylint: disable=E0213 + """helper attribute holding the standard test's data directory + + NOTE: this is a logilab's standard + """ + mod = sys.modules[cls.__module__] + return osp.join(osp.dirname(osp.abspath(mod.__file__)), 'data') + # cache it (use a class method to cache on class since TestCase is + # instantiated for each test run) + + @classmethod + def datapath(cls, *fname): + """joins the object's datadir and `fname`""" + return osp.join(cls.datadir, *fname) + + def set_description(self, descr): + """sets the current test's description. 
+ This can be useful for generative tests because it allows to specify + a description per yield + """ + self._current_test_descr = descr + + # override default's unittest.py feature + def shortDescription(self): + """override default unittest shortDescription to handle correctly + generative tests + """ + if self._current_test_descr is not None: + return self._current_test_descr + return super(TestCase, self).shortDescription() + + def quiet_run(self, result, func, *args, **kwargs): + try: + func(*args, **kwargs) + except (KeyboardInterrupt, SystemExit): + raise + except unittest.SkipTest as e: + if hasattr(result, 'addSkip'): + result.addSkip(self, str(e)) + else: + warnings.warn("TestResult has no addSkip method, skips not reported", + RuntimeWarning, 2) + result.addSuccess(self) + return False + except: + result.addError(self, self.__exc_info()) + return False + return True + + def _get_test_method(self): + """return the test method""" + return getattr(self, self._testMethodName) + + def optval(self, option, default=None): + """return the option value or default if the option is not define""" + return getattr(self._options_, option, default) + + def __call__(self, result=None, runcondition=None, options=None): + """rewrite TestCase.__call__ to support generative tests + This is mostly a copy/paste from unittest.py (i.e same + variable names, same logic, except for the generative tests part) + """ + from logilab.common.pytest import FILE_RESTART + if result is None: + result = self.defaultTestResult() + result.pdbclass = self.pdbclass + self._options_ = options + # if result.cvg: + # result.cvg.start() + testMethod = self._get_test_method() + if (getattr(self.__class__, "__unittest_skip__", False) or + getattr(testMethod, "__unittest_skip__", False)): + # If the class or method was skipped. 
+ try: + skip_why = (getattr(self.__class__, '__unittest_skip_why__', '') + or getattr(testMethod, '__unittest_skip_why__', '')) + self._addSkip(result, skip_why) + finally: + result.stopTest(self) + return + if runcondition and not runcondition(testMethod): + return # test is skipped + result.startTest(self) + try: + if not self.quiet_run(result, self.setUp): + return + generative = isgeneratorfunction(testMethod) + # generative tests + if generative: + self._proceed_generative(result, testMethod, + runcondition) + else: + status = self._proceed(result, testMethod) + success = (status == 0) + if not self.quiet_run(result, self.tearDown): + return + if not generative and success: + if hasattr(options, "exitfirst") and options.exitfirst: + # add this test to restart file + try: + restartfile = open(FILE_RESTART, 'a') + try: + descr = '.'.join((self.__class__.__module__, + self.__class__.__name__, + self._testMethodName)) + restartfile.write(descr+os.linesep) + finally: + restartfile.close() + except Exception: + print("Error while saving succeeded test into", + osp.join(os.getcwd(), FILE_RESTART), + file=sys.__stderr__) + raise + result.addSuccess(self) + finally: + # if result.cvg: + # result.cvg.stop() + result.stopTest(self) + + def _proceed_generative(self, result, testfunc, runcondition=None): + # cancel startTest()'s increment + result.testsRun -= 1 + success = True + try: + for params in testfunc(): + if runcondition and not runcondition(testfunc, + skipgenerator=False): + if not (isinstance(params, InnerTest) + and runcondition(params)): + continue + if not isinstance(params, (tuple, list)): + params = (params, ) + func = params[0] + args, kwargs = parse_generative_args(params[1:]) + # increment test counter manually + result.testsRun += 1 + status = self._proceed(result, func, args, kwargs) + if status == 0: + result.addSuccess(self) + success = True + else: + success = False + # XXX Don't stop anymore if an error occured + #if status == 2: + # 
result.shouldStop = True + if result.shouldStop: # either on error or on exitfirst + error + break + except: + # if an error occurs between two yield + result.addError(self, self.__exc_info()) + success = False + return success + + def _proceed(self, result, testfunc, args=(), kwargs=None): + """proceed the actual test + returns 0 on success, 1 on failure, 2 on error + + Note: addSuccess can't be called here because we have to wait + for tearDown to be successfully executed to declare the test as + successful + """ + kwargs = kwargs or {} + try: + testfunc(*args, **kwargs) + except self.failureException: + result.addFailure(self, self.__exc_info()) + return 1 + except KeyboardInterrupt: + raise + except InnerTestSkipped as e: + result.addSkip(self, e) + return 1 + except SkipTest as e: + result.addSkip(self, e) + return 0 + except: + result.addError(self, self.__exc_info()) + return 2 + return 0 + + def defaultTestResult(self): + """return a new instance of the defaultTestResult""" + return SkipAwareTestResult() + + skip = _deprecate(unittest.TestCase.skipTest) + assertEquals = _deprecate(unittest.TestCase.assertEqual) + assertNotEquals = _deprecate(unittest.TestCase.assertNotEqual) + assertAlmostEquals = _deprecate(unittest.TestCase.assertAlmostEqual) + assertNotAlmostEquals = _deprecate(unittest.TestCase.assertNotAlmostEqual) + + def innerSkip(self, msg=None): + """mark a generative test as skipped for the reason""" + msg = msg or 'test was skipped' + raise InnerTestSkipped(msg) + + @deprecated('Please use assertDictEqual instead.') + def assertDictEquals(self, dict1, dict2, msg=None, context=None): + """compares two dicts + + If the two dict differ, the first difference is shown in the error + message + :param dict1: a Python Dictionary + :param dict2: a Python Dictionary + :param msg: custom message (String) in case of failure + """ + dict1 = dict(dict1) + msgs = [] + for key, value in dict2.items(): + try: + if dict1[key] != value: + msgs.append('%r != %r for 
key %r' % (dict1[key], value, + key)) + del dict1[key] + except KeyError: + msgs.append('missing %r key' % key) + if dict1: + msgs.append('dict2 is lacking %r' % dict1) + if msg: + self.failureException(msg) + elif msgs: + if context is not None: + base = '%s\n' % context + else: + base = '' + self.fail(base + '\n'.join(msgs)) + + @deprecated('Please use assertCountEqual instead.') + def assertUnorderedIterableEquals(self, got, expected, msg=None): + """compares two iterable and shows difference between both + + :param got: the unordered Iterable that we found + :param expected: the expected unordered Iterable + :param msg: custom message (String) in case of failure + """ + got, expected = list(got), list(expected) + self.assertSetEqual(set(got), set(expected), msg) + if len(got) != len(expected): + if msg is None: + msg = ['Iterable have the same elements but not the same number', + '\t\ti\t'] + got_count = {} + expected_count = {} + for element in got: + got_count[element] = got_count.get(element, 0) + 1 + for element in expected: + expected_count[element] = expected_count.get(element, 0) + 1 + # we know that got_count.key() == expected_count.key() + # because of assertSetEqual + for element, count in got_count.iteritems(): + other_count = expected_count[element] + if other_count != count: + msg.append('\t%s\t%s\t%s' % (element, other_count, count)) + + self.fail(msg) + + assertUnorderedIterableEqual = assertUnorderedIterableEquals + assertUnordIterEquals = assertUnordIterEqual = assertUnorderedIterableEqual + + @deprecated('Please use assertSetEqual instead.') + def assertSetEquals(self,got,expected, msg=None): + """compares two sets and shows difference between both + + Don't use it for iterables other than sets. 
+ + :param got: the Set that we found + :param expected: the second Set to be compared to the first one + :param msg: custom message (String) in case of failure + """ + + if not(isinstance(got, set) and isinstance(expected, set)): + warnings.warn("the assertSetEquals function if now intended for set only."\ + "use assertUnorderedIterableEquals instead.", + DeprecationWarning, 2) + return self.assertUnorderedIterableEquals(got, expected, msg) + + items={} + items['missing'] = expected - got + items['unexpected'] = got - expected + if any(items.itervalues()): + if msg is None: + msg = '\n'.join('%s:\n\t%s' % (key, "\n\t".join(str(value) for value in values)) + for key, values in items.iteritems() if values) + self.fail(msg) + + @deprecated('Please use assertListEqual instead.') + def assertListEquals(self, list_1, list_2, msg=None): + """compares two lists + + If the two list differ, the first difference is shown in the error + message + + :param list_1: a Python List + :param list_2: a second Python List + :param msg: custom message (String) in case of failure + """ + _l1 = list_1[:] + for i, value in enumerate(list_2): + try: + if _l1[0] != value: + from pprint import pprint + pprint(list_1) + pprint(list_2) + self.fail('%r != %r for index %d' % (_l1[0], value, i)) + del _l1[0] + except IndexError: + if msg is None: + msg = 'list_1 has only %d elements, not %s '\ + '(at least %r missing)'% (i, len(list_2), value) + self.fail(msg) + if _l1: + if msg is None: + msg = 'list_2 is lacking %r' % _l1 + self.fail(msg) + + @deprecated('Non-standard. Please use assertMultiLineEqual instead.') + def assertLinesEquals(self, string1, string2, msg=None, striplines=False): + """compare two strings and assert that the text lines of the strings + are equal. 
+ + :param string1: a String + :param string2: a String + :param msg: custom message (String) in case of failure + :param striplines: Boolean to trigger line stripping before comparing + """ + lines1 = string1.splitlines() + lines2 = string2.splitlines() + if striplines: + lines1 = [l.strip() for l in lines1] + lines2 = [l.strip() for l in lines2] + self.assertListEqual(lines1, lines2, msg) + assertLineEqual = assertLinesEquals + + @deprecated('Non-standard: please copy test method to your TestCase class') + def assertXMLWellFormed(self, stream, msg=None, context=2): + """asserts the XML stream is well-formed (no DTD conformance check) + + :param context: number of context lines in standard message + (show all data if negative). + Only available with element tree + """ + try: + from xml.etree.ElementTree import parse + self._assertETXMLWellFormed(stream, parse, msg) + except ImportError: + from xml.sax import make_parser, SAXParseException + parser = make_parser() + try: + parser.parse(stream) + except SAXParseException as ex: + if msg is None: + stream.seek(0) + for _ in range(ex.getLineNumber()): + line = stream.readline() + pointer = ('' * (ex.getLineNumber() - 1)) + '^' + msg = 'XML stream not well formed: %s\n%s%s' % (ex, line, pointer) + self.fail(msg) + + @deprecated('Non-standard: please copy test method to your TestCase class') + def assertXMLStringWellFormed(self, xml_string, msg=None, context=2): + """asserts the XML string is well-formed (no DTD conformance check) + + :param context: number of context lines in standard message + (show all data if negative). 
+ Only available with element tree + """ + try: + from xml.etree.ElementTree import fromstring + except ImportError: + from elementtree.ElementTree import fromstring + self._assertETXMLWellFormed(xml_string, fromstring, msg) + + def _assertETXMLWellFormed(self, data, parse, msg=None, context=2): + """internal function used by /assertXML(String)?WellFormed/ functions + + :param data: xml_data + :param parse: appropriate parser function for this data + :param msg: error message + :param context: number of context lines in standard message + (show all data if negative). + Only available with element tree + """ + from xml.parsers.expat import ExpatError + try: + from xml.etree.ElementTree import ParseError + except ImportError: + # compatibility for 1: + if len(tup)<=1: + self.fail( "tuple %s has no attributes (%s expected)"%(tup, + dict(element.attrib))) + self.assertDictEqual(element.attrib, tup[1]) + # check children + if len(element) or len(tup)>2: + if len(tup)<=2: + self.fail( "tuple %s has no children (%i expected)"%(tup, + len(element))) + if len(element) != len(tup[2]): + self.fail( "tuple %s has %i children%s (%i expected)"%(tup, + len(tup[2]), + ('', 's')[len(tup[2])>1], len(element))) + for index in range(len(tup[2])): + self.assertXMLEqualsTuple(element[index], tup[2][index]) + #check text + if element.text or len(tup)>3: + if len(tup)<=3: + self.fail( "tuple %s has no text value (%r expected)"%(tup, + element.text)) + self.assertTextEquals(element.text, tup[3]) + #check tail + if element.tail or len(tup)>4: + if len(tup)<=4: + self.fail( "tuple %s has no tail value (%r expected)"%(tup, + element.tail)) + self.assertTextEquals(element.tail, tup[4]) + + def _difftext(self, lines1, lines2, junk=None, msg_prefix='Texts differ'): + junk = junk or (' ', '\t') + # result is a generator + result = difflib.ndiff(lines1, lines2, charjunk=lambda x: x in junk) + read = [] + for line in result: + read.append(line) + # lines that don't start with a ' ' are diff ones + 
if not line.startswith(' '): + self.fail('\n'.join(['%s\n'%msg_prefix]+read + list(result))) + + @deprecated('Non-standard. Please use assertMultiLineEqual instead.') + def assertTextEquals(self, text1, text2, junk=None, + msg_prefix='Text differ', striplines=False): + """compare two multiline strings (using difflib and splitlines()) + + :param text1: a Python BaseString + :param text2: a second Python Basestring + :param junk: List of Caracters + :param msg_prefix: String (message prefix) + :param striplines: Boolean to trigger line stripping before comparing + """ + msg = [] + if not isinstance(text1, string_types): + msg.append('text1 is not a string (%s)'%(type(text1))) + if not isinstance(text2, string_types): + msg.append('text2 is not a string (%s)'%(type(text2))) + if msg: + self.fail('\n'.join(msg)) + lines1 = text1.strip().splitlines(True) + lines2 = text2.strip().splitlines(True) + if striplines: + lines1 = [line.strip() for line in lines1] + lines2 = [line.strip() for line in lines2] + self._difftext(lines1, lines2, junk, msg_prefix) + assertTextEqual = assertTextEquals + + @deprecated('Non-standard: please copy test method to your TestCase class') + def assertStreamEquals(self, stream1, stream2, junk=None, + msg_prefix='Stream differ'): + """compare two streams (using difflib and readlines())""" + # if stream2 is stream2, readlines() on stream1 will also read lines + # in stream2, so they'll appear different, although they're not + if stream1 is stream2: + return + # make sure we compare from the beginning of the stream + stream1.seek(0) + stream2.seek(0) + # compare + self._difftext(stream1.readlines(), stream2.readlines(), junk, + msg_prefix) + + assertStreamEqual = assertStreamEquals + + @deprecated('Non-standard: please copy test method to your TestCase class') + def assertFileEquals(self, fname1, fname2, junk=(' ', '\t')): + """compares two files using difflib""" + self.assertStreamEqual(open(fname1), open(fname2), junk, + msg_prefix='Files 
differs\n-:%s\n+:%s\n'%(fname1, fname2)) + + assertFileEqual = assertFileEquals + + @deprecated('Non-standard: please copy test method to your TestCase class') + def assertDirEquals(self, path_a, path_b): + """compares two files using difflib""" + assert osp.exists(path_a), "%s doesn't exists" % path_a + assert osp.exists(path_b), "%s doesn't exists" % path_b + + all_a = [ (ipath[len(path_a):].lstrip('/'), idirs, ifiles) + for ipath, idirs, ifiles in os.walk(path_a)] + all_a.sort(key=itemgetter(0)) + + all_b = [ (ipath[len(path_b):].lstrip('/'), idirs, ifiles) + for ipath, idirs, ifiles in os.walk(path_b)] + all_b.sort(key=itemgetter(0)) + + iter_a, iter_b = iter(all_a), iter(all_b) + partial_iter = True + ipath_a, idirs_a, ifiles_a = data_a = None, None, None + while True: + try: + ipath_a, idirs_a, ifiles_a = datas_a = next(iter_a) + partial_iter = False + ipath_b, idirs_b, ifiles_b = datas_b = next(iter_b) + partial_iter = True + + + self.assertTrue(ipath_a == ipath_b, + "unexpected %s in %s while looking %s from %s" % + (ipath_a, path_a, ipath_b, path_b)) + + + errors = {} + sdirs_a = set(idirs_a) + sdirs_b = set(idirs_b) + errors["unexpected directories"] = sdirs_a - sdirs_b + errors["missing directories"] = sdirs_b - sdirs_a + + sfiles_a = set(ifiles_a) + sfiles_b = set(ifiles_b) + errors["unexpected files"] = sfiles_a - sfiles_b + errors["missing files"] = sfiles_b - sfiles_a + + + msgs = [ "%s: %s"% (name, items) + for name, items in errors.items() if items] + + if msgs: + msgs.insert(0, "%s and %s differ :" % ( + osp.join(path_a, ipath_a), + osp.join(path_b, ipath_b), + )) + self.fail("\n".join(msgs)) + + for files in (ifiles_a, ifiles_b): + files.sort() + + for index, path in enumerate(ifiles_a): + self.assertFileEquals(osp.join(path_a, ipath_a, path), + osp.join(path_b, ipath_b, ifiles_b[index])) + + except StopIteration: + break + + assertDirEqual = assertDirEquals + + def assertIsInstance(self, obj, klass, msg=None, strict=False): + """check if an 
object is an instance of a class + + :param obj: the Python Object to be checked + :param klass: the target class + :param msg: a String for a custom message + :param strict: if True, check that the class of is ; + else check with 'isinstance' + """ + if strict: + warnings.warn('[API] Non-standard. Strict parameter has vanished', + DeprecationWarning, stacklevel=2) + if msg is None: + if strict: + msg = '%r is not of class %s but of %s' + else: + msg = '%r is not an instance of %s but of %s' + msg = msg % (obj, klass, type(obj)) + if strict: + self.assertTrue(obj.__class__ is klass, msg) + else: + self.assertTrue(isinstance(obj, klass), msg) + + @deprecated('Please use assertIsNone instead.') + def assertNone(self, obj, msg=None): + """assert obj is None + + :param obj: Python Object to be tested + """ + if msg is None: + msg = "reference to %r when None expected"%(obj,) + self.assertTrue( obj is None, msg ) + + @deprecated('Please use assertIsNotNone instead.') + def assertNotNone(self, obj, msg=None): + """assert obj is not None""" + if msg is None: + msg = "unexpected reference to None" + self.assertTrue( obj is not None, msg ) + + @deprecated('Non-standard. Please use assertAlmostEqual instead.') + def assertFloatAlmostEquals(self, obj, other, prec=1e-5, + relative=False, msg=None): + """compares if two floats have a distance smaller than expected + precision. + + :param obj: a Float + :param other: another Float to be comparted to + :param prec: a Float describing the precision + :param relative: boolean switching to relative/absolute precision + :param msg: a String for a custom message + """ + if msg is None: + msg = "%r != %r" % (obj, other) + if relative: + prec = prec*math.fabs(obj) + self.assertTrue(math.fabs(obj - other) < prec, msg) + + def failUnlessRaises(self, excClass, callableObj=None, *args, **kwargs): + """override default failUnlessRaises method to return the raised + exception instance. 
+ + Fail unless an exception of class excClass is thrown + by callableObj when invoked with arguments args and keyword + arguments kwargs. If a different type of exception is + thrown, it will not be caught, and the test case will be + deemed to have suffered an error, exactly as for an + unexpected exception. + + CAUTION! There are subtle differences between Logilab and unittest2 + - exc is not returned in standard version + - context capabilities in standard version + - try/except/else construction (minor) + + :param excClass: the Exception to be raised + :param callableObj: a callable Object which should raise + :param args: a List of arguments for + :param kwargs: a List of keyword arguments for + """ + # XXX cube vcslib : test_branches_from_app + if callableObj is None: + _assert = super(TestCase, self).assertRaises + return _assert(excClass, callableObj, *args, **kwargs) + try: + callableObj(*args, **kwargs) + except excClass as exc: + class ProxyException: + def __init__(self, obj): + self._obj = obj + def __getattr__(self, attr): + warn_msg = ("This exception was retrieved with the old testlib way " + "`exc = self.assertRaises(Exc, callable)`, please use " + "the context manager instead'") + warnings.warn(warn_msg, DeprecationWarning, 2) + return self._obj.__getattribute__(attr) + return ProxyException(exc) + else: + if hasattr(excClass, '__name__'): + excName = excClass.__name__ + else: + excName = str(excClass) + raise self.failureException("%s not raised" % excName) + + assertRaises = failUnlessRaises + + if sys.version_info >= (3,2): + assertItemsEqual = unittest.TestCase.assertCountEqual + else: + assertCountEqual = unittest.TestCase.assertItemsEqual + if sys.version_info < (2,7): + def assertIsNotNone(self, value, *args, **kwargs): + self.assertNotEqual(None, value, *args, **kwargs) + +TestCase.assertItemsEqual = deprecated('assertItemsEqual is deprecated, use assertCountEqual')( + TestCase.assertItemsEqual) + +import doctest + +class 
SkippedSuite(unittest.TestSuite): + def test(self): + """just there to trigger test execution""" + self.skipped_test('doctest module has no DocTestSuite class') + + +class DocTestFinder(doctest.DocTestFinder): + + def __init__(self, *args, **kwargs): + self.skipped = kwargs.pop('skipped', ()) + doctest.DocTestFinder.__init__(self, *args, **kwargs) + + def _get_test(self, obj, name, module, globs, source_lines): + """override default _get_test method to be able to skip tests + according to skipped attribute's value + """ + if getattr(obj, '__name__', '') in self.skipped: + return None + return doctest.DocTestFinder._get_test(self, obj, name, module, + globs, source_lines) + + +class DocTest(TestCase): + """trigger module doctest + I don't know how to make unittest.main consider the DocTestSuite instance + without this hack + """ + skipped = () + def __call__(self, result=None, runcondition=None, options=None):\ + # pylint: disable=W0613 + try: + finder = DocTestFinder(skipped=self.skipped) + suite = doctest.DocTestSuite(self.module, test_finder=finder) + # XXX iirk + doctest.DocTestCase._TestCase__exc_info = sys.exc_info + except AttributeError: + suite = SkippedSuite() + # doctest may gork the builtins dictionnary + # This happen to the "_" entry used by gettext + old_builtins = builtins.__dict__.copy() + try: + return suite.run(result) + finally: + builtins.__dict__.clear() + builtins.__dict__.update(old_builtins) + run = __call__ + + def test(self): + """just there to trigger test execution""" + +MAILBOX = None + +class MockSMTP: + """fake smtplib.SMTP""" + + def __init__(self, host, port): + self.host = host + self.port = port + global MAILBOX + self.reveived = MAILBOX = [] + + def set_debuglevel(self, debuglevel): + """ignore debug level""" + + def sendmail(self, fromaddr, toaddres, body): + """push sent mail in the mailbox""" + self.reveived.append((fromaddr, toaddres, body)) + + def quit(self): + """ignore quit""" + + +class 
MockConfigParser(configparser.ConfigParser): + """fake ConfigParser.ConfigParser""" + + def __init__(self, options): + configparser.ConfigParser.__init__(self) + for section, pairs in options.iteritems(): + self.add_section(section) + for key, value in pairs.iteritems(): + self.set(section, key, value) + def write(self, _): + raise NotImplementedError() + + +class MockConnection: + """fake DB-API 2.0 connexion AND cursor (i.e. cursor() return self)""" + + def __init__(self, results): + self.received = [] + self.states = [] + self.results = results + + def cursor(self): + """Mock cursor method""" + return self + def execute(self, query, args=None): + """Mock execute method""" + self.received.append( (query, args) ) + def fetchone(self): + """Mock fetchone method""" + return self.results[0] + def fetchall(self): + """Mock fetchall method""" + return self.results + def commit(self): + """Mock commiy method""" + self.states.append( ('commit', len(self.received)) ) + def rollback(self): + """Mock rollback method""" + self.states.append( ('rollback', len(self.received)) ) + def close(self): + """Mock close method""" + pass + + +def mock_object(**params): + """creates an object using params to set attributes + >>> option = mock_object(verbose=False, index=range(5)) + >>> option.verbose + False + >>> option.index + [0, 1, 2, 3, 4] + """ + return type('Mock', (), params)() + + +def create_files(paths, chroot): + """Creates directories and files found in . 
+ + :param paths: list of relative paths to files or directories + :param chroot: the root directory in which paths will be created + + >>> from os.path import isdir, isfile + >>> isdir('/tmp/a') + False + >>> create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], '/tmp') + >>> isdir('/tmp/a') + True + >>> isdir('/tmp/a/b/c') + True + >>> isfile('/tmp/a/b/c/d/e.py') + True + >>> isfile('/tmp/a/b/foo.py') + True + """ + dirs, files = set(), set() + for path in paths: + path = osp.join(chroot, path) + filename = osp.basename(path) + # path is a directory path + if filename == '': + dirs.add(path) + # path is a filename path + else: + dirs.add(osp.dirname(path)) + files.add(path) + for dirpath in dirs: + if not osp.isdir(dirpath): + os.makedirs(dirpath) + for filepath in files: + open(filepath, 'w').close() + + +class AttrObject: # XXX cf mock_object + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + +def tag(*args, **kwargs): + """descriptor adding tag to a function""" + def desc(func): + assert not hasattr(func, 'tags') + func.tags = Tags(*args, **kwargs) + return func + return desc + +def require_version(version): + """ Compare version of python interpreter to the given one. Skip the test + if older. + """ + def check_require_version(f): + version_elements = version.split('.') + try: + compare = tuple([int(v) for v in version_elements]) + except ValueError: + raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version) + current = sys.version_info[:3] + if current < compare: + def new_f(self, *args, **kwargs): + self.skipTest('Need at least %s version of python. Current version is %s.' % (version, '.'.join([str(element) for element in current]))) + new_f.__name__ = f.__name__ + return new_f + else: + return f + return check_require_version + +def require_module(module): + """ Check if the given module is loaded. Skip the test if not. 
+ """ + def check_require_module(f): + try: + __import__(module) + return f + except ImportError: + def new_f(self, *args, **kwargs): + self.skipTest('%s can not be imported.' % module) + new_f.__name__ = f.__name__ + return new_f + return check_require_module + diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/textutils.py b/pymode/libs/logilab/common/textutils.py similarity index 99% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/textutils.py rename to pymode/libs/logilab/common/textutils.py index f55c0040..9046f975 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/textutils.py +++ b/pymode/libs/logilab/common/textutils.py @@ -284,11 +284,14 @@ def text_to_dict(text): dict of {'key': 'value'}. When the same key is encountered multiple time, value is turned into a list containing all values. - >>> text_to_dict('''multiple=1 + >>> d = text_to_dict('''multiple=1 ... multiple= 2 ... single =3 ... ''') - {'single': '3', 'multiple': ['1', '2']} + >>> d['single'] + '3' + >>> d['multiple'] + ['1', '2'] """ res = {} diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/tree.py b/pymode/libs/logilab/common/tree.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/tree.py rename to pymode/libs/logilab/common/tree.py diff --git a/pymode/libs/logilab/common/umessage.py b/pymode/libs/logilab/common/umessage.py new file mode 100644 index 00000000..a5e47995 --- /dev/null +++ b/pymode/libs/logilab/common/umessage.py @@ -0,0 +1,194 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. 
+# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Unicode email support (extends email from stdlib)""" + +__docformat__ = "restructuredtext en" + +import email +from encodings import search_function +import sys +if sys.version_info >= (2, 5): + from email.utils import parseaddr, parsedate + from email.header import decode_header +else: + from email.Utils import parseaddr, parsedate + from email.Header import decode_header + +from datetime import datetime + +try: + from mx.DateTime import DateTime +except ImportError: + DateTime = datetime + +import logilab.common as lgc + + +def decode_QP(string): + parts = [] + for decoded, charset in decode_header(string): + if not charset : + charset = 'iso-8859-15' + parts.append(decoded.decode(charset, 'replace')) + + if sys.version_info < (3, 3): + # decoding was non-RFC compliant wrt to whitespace handling + # see http://bugs.python.org/issue1079 + return u' '.join(parts) + return u''.join(parts) + +def message_from_file(fd): + try: + return UMessage(email.message_from_file(fd)) + except email.Errors.MessageParseError: + return '' + +def message_from_string(string): + try: + return UMessage(email.message_from_string(string)) + except email.Errors.MessageParseError: + return '' + +class UMessage: + """Encapsulates an email.Message instance and returns only unicode objects. 
+ """ + + def __init__(self, message): + self.message = message + + # email.Message interface ################################################# + + def get(self, header, default=None): + value = self.message.get(header, default) + if value: + return decode_QP(value) + return value + + def __getitem__(self, header): + return self.get(header) + + def get_all(self, header, default=()): + return [decode_QP(val) for val in self.message.get_all(header, default) + if val is not None] + + def is_multipart(self): + return self.message.is_multipart() + + def get_boundary(self): + return self.message.get_boundary() + + def walk(self): + for part in self.message.walk(): + yield UMessage(part) + + if sys.version_info < (3, 0): + + def get_payload(self, index=None, decode=False): + message = self.message + if index is None: + payload = message.get_payload(index, decode) + if isinstance(payload, list): + return [UMessage(msg) for msg in payload] + if message.get_content_maintype() != 'text': + return payload + + charset = message.get_content_charset() or 'iso-8859-1' + if search_function(charset) is None: + charset = 'iso-8859-1' + return unicode(payload or '', charset, "replace") + else: + payload = UMessage(message.get_payload(index, decode)) + return payload + + def get_content_maintype(self): + return unicode(self.message.get_content_maintype()) + + def get_content_type(self): + return unicode(self.message.get_content_type()) + + def get_filename(self, failobj=None): + value = self.message.get_filename(failobj) + if value is failobj: + return value + try: + return unicode(value) + except UnicodeDecodeError: + return u'error decoding filename' + + else: + + def get_payload(self, index=None, decode=False): + message = self.message + if index is None: + payload = message.get_payload(index, decode) + if isinstance(payload, list): + return [UMessage(msg) for msg in payload] + return payload + else: + payload = UMessage(message.get_payload(index, decode)) + return payload + + def 
get_content_maintype(self): + return self.message.get_content_maintype() + + def get_content_type(self): + return self.message.get_content_type() + + def get_filename(self, failobj=None): + return self.message.get_filename(failobj) + + # other convenience methods ############################################### + + def headers(self): + """return an unicode string containing all the message's headers""" + values = [] + for header in self.message.keys(): + values.append(u'%s: %s' % (header, self.get(header))) + return '\n'.join(values) + + def multi_addrs(self, header): + """return a list of 2-uple (name, address) for the given address (which + is expected to be an header containing address such as from, to, cc...) + """ + persons = [] + for person in self.get_all(header, ()): + name, mail = parseaddr(person) + persons.append((name, mail)) + return persons + + def date(self, alternative_source=False, return_str=False): + """return a datetime object for the email's date or None if no date is + set or if it can't be parsed + """ + value = self.get('date') + if value is None and alternative_source: + unix_from = self.message.get_unixfrom() + if unix_from is not None: + try: + value = unix_from.split(" ", 2)[2] + except IndexError: + pass + if value is not None: + datetuple = parsedate(value) + if datetuple: + if lgc.USE_MX_DATETIME: + return DateTime(*datetuple[:6]) + return datetime(*datetuple[:6]) + elif not return_str: + return None + return value diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/__init__.py b/pymode/libs/logilab/common/ureports/__init__.py similarity index 93% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/__init__.py rename to pymode/libs/logilab/common/ureports/__init__.py index dcffcfa3..d76ebe52 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/__init__.py +++ b/pymode/libs/logilab/common/ureports/__init__.py @@ -20,13 +20,11 @@ A way to create simple reports using 
python objects, primarily designed to be formatted as text and html. """ -from __future__ import generators __docformat__ = "restructuredtext en" import sys -from cStringIO import StringIO -from StringIO import StringIO as UStringIO +from logilab.common.compat import StringIO from logilab.common.textutils import linesep @@ -44,13 +42,13 @@ def layout_title(layout): """ for child in layout.children: if isinstance(child, Title): - return ' '.join([node.data for node in get_nodes(child, Text)]) + return u' '.join([node.data for node in get_nodes(child, Text)]) def build_summary(layout, level=1): """make a summary for the report, including X level""" assert level > 0 level -= 1 - summary = List(klass='summary') + summary = List(klass=u'summary') for child in layout.children: if not isinstance(child, Section): continue @@ -59,7 +57,7 @@ def build_summary(layout, level=1): continue if not child.id: child.id = label.replace(' ', '-') - node = Link('#'+child.id, label=label or child.id) + node = Link(u'#'+child.id, label=label or child.id) # FIXME: Three following lines produce not very compliant # docbook: there are some useless . 
They might be # replaced by the three commented lines but this then produces @@ -101,7 +99,7 @@ def format_children(self, layout): for child in getattr(layout, 'children', ()): child.accept(self) - def writeln(self, string=''): + def writeln(self, string=u''): """write a line in the output buffer""" self.write(string + linesep) @@ -134,7 +132,7 @@ def get_table_content(self, table): result[-1].append(cell) # fill missing cells while len(result[-1]) < cols: - result[-1].append('') + result[-1].append(u'') return result def compute_content(self, layout): @@ -149,7 +147,7 @@ def write(data): stream.write(data) except UnicodeEncodeError: stream.write(data.encode(self.encoding)) - def writeln(data=''): + def writeln(data=u''): try: stream.write(data+linesep) except UnicodeEncodeError: @@ -158,7 +156,7 @@ def writeln(data=''): self.writeln = writeln self.__compute_funcs.append((write, writeln)) for child in layout.children: - stream = UStringIO() + stream = StringIO() child.accept(self) yield stream.getvalue() self.__compute_funcs.pop() diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/docbook_writer.py b/pymode/libs/logilab/common/ureports/docbook_writer.py similarity index 99% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/docbook_writer.py rename to pymode/libs/logilab/common/ureports/docbook_writer.py index e75cbe09..857068c8 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/docbook_writer.py +++ b/pymode/libs/logilab/common/ureports/docbook_writer.py @@ -16,9 +16,10 @@ # You should have received a copy of the GNU Lesser General Public License along # with logilab-common. If not, see . 
"""HTML formatting drivers for ureports""" -from __future__ import generators __docformat__ = "restructuredtext en" +from six.moves import range + from logilab.common.ureports import HTMLWriter class DocbookWriter(HTMLWriter): diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/html_writer.py b/pymode/libs/logilab/common/ureports/html_writer.py similarity index 66% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/html_writer.py rename to pymode/libs/logilab/common/ureports/html_writer.py index 1d095034..eba34ea4 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/html_writer.py +++ b/pymode/libs/logilab/common/ureports/html_writer.py @@ -20,6 +20,8 @@ from cgi import escape +from six.moves import range + from logilab.common.ureports import BaseWriter @@ -32,100 +34,100 @@ def __init__(self, snippet=None): def handle_attrs(self, layout): """get an attribute string from layout member attributes""" - attrs = '' + attrs = u'' klass = getattr(layout, 'klass', None) if klass: - attrs += ' class="%s"' % klass + attrs += u' class="%s"' % klass nid = getattr(layout, 'id', None) if nid: - attrs += ' id="%s"' % nid + attrs += u' id="%s"' % nid return attrs def begin_format(self, layout): """begin to format a layout""" super(HTMLWriter, self).begin_format(layout) if self.snippet is None: - self.writeln('') - self.writeln('') + self.writeln(u'') + self.writeln(u'') def end_format(self, layout): """finished to format a layout""" if self.snippet is None: - self.writeln('') - self.writeln('') + self.writeln(u'') + self.writeln(u'') def visit_section(self, layout): """display a section as html, using div + h[section level]""" self.section += 1 - self.writeln('' % self.handle_attrs(layout)) + self.writeln(u'' % self.handle_attrs(layout)) self.format_children(layout) - self.writeln('') + self.writeln(u'') self.section -= 1 def visit_title(self, layout): """display a title using """ - self.write('' % (self.section, 
self.handle_attrs(layout))) + self.write(u'' % (self.section, self.handle_attrs(layout))) self.format_children(layout) - self.writeln('' % self.section) + self.writeln(u'' % self.section) def visit_table(self, layout): """display a table as html""" - self.writeln('' % self.handle_attrs(layout)) + self.writeln(u'' % self.handle_attrs(layout)) table_content = self.get_table_content(layout) for i in range(len(table_content)): row = table_content[i] if i == 0 and layout.rheaders: - self.writeln('') + self.writeln(u'') elif i+1 == len(table_content) and layout.rrheaders: - self.writeln('') + self.writeln(u'') else: - self.writeln('' % (i%2 and 'even' or 'odd')) + self.writeln(u'' % (i%2 and 'even' or 'odd')) for j in range(len(row)): - cell = row[j] or ' ' + cell = row[j] or u' ' if (layout.rheaders and i == 0) or \ (layout.cheaders and j == 0) or \ (layout.rrheaders and i+1 == len(table_content)) or \ (layout.rcheaders and j+1 == len(row)): - self.writeln('%s' % cell) + self.writeln(u'%s' % cell) else: - self.writeln('%s' % cell) - self.writeln('') - self.writeln('') + self.writeln(u'%s' % cell) + self.writeln(u'') + self.writeln(u'') def visit_list(self, layout): """display a list as html""" - self.writeln('' % self.handle_attrs(layout)) + self.writeln(u'' % self.handle_attrs(layout)) for row in list(self.compute_content(layout)): - self.writeln('
  • %s
  • ' % row) - self.writeln('') + self.writeln(u'
  • %s
  • ' % row) + self.writeln(u'') def visit_paragraph(self, layout): """display links (using

    )""" - self.write('

    ') + self.write(u'

    ') self.format_children(layout) - self.write('

    ') + self.write(u'

    ') def visit_span(self, layout): """display links (using

    )""" - self.write('' % self.handle_attrs(layout)) + self.write(u'' % self.handle_attrs(layout)) self.format_children(layout) - self.write('') + self.write(u'') def visit_link(self, layout): """display links (using )""" - self.write(' %s' % (layout.url, - self.handle_attrs(layout), - layout.label)) + self.write(u' %s' % (layout.url, + self.handle_attrs(layout), + layout.label)) def visit_verbatimtext(self, layout): """display verbatim text (using

    )"""
    -        self.write('
    ')
    -        self.write(layout.data.replace('&', '&').replace('<', '<'))
    -        self.write('
    ') + self.write(u'
    ')
    +        self.write(layout.data.replace(u'&', u'&').replace(u'<', u'<'))
    +        self.write(u'
    ') def visit_text(self, layout): """add some text""" data = layout.data if layout.escaped: - data = data.replace('&', '&').replace('<', '<') + data = data.replace(u'&', u'&').replace(u'<', u'<') self.write(data) diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/nodes.py b/pymode/libs/logilab/common/ureports/nodes.py similarity index 98% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/nodes.py rename to pymode/libs/logilab/common/ureports/nodes.py index d63b5828..a9585b30 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/nodes.py +++ b/pymode/libs/logilab/common/ureports/nodes.py @@ -23,6 +23,8 @@ from logilab.common.tree import VNode +from six import string_types + class BaseComponent(VNode): """base report component @@ -79,7 +81,7 @@ def __init__(self, data, escaped=True, **kwargs): super(Text, self).__init__(**kwargs) #if isinstance(data, unicode): # data = data.encode('ascii') - assert isinstance(data, (str, unicode)), data.__class__ + assert isinstance(data, string_types), data.__class__ self.escaped = escaped self.data = data diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/text_writer.py b/pymode/libs/logilab/common/ureports/text_writer.py similarity index 82% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/text_writer.py rename to pymode/libs/logilab/common/ureports/text_writer.py index 04c8f263..c87613c9 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/ureports/text_writer.py +++ b/pymode/libs/logilab/common/ureports/text_writer.py @@ -16,14 +16,19 @@ # You should have received a copy of the GNU Lesser General Public License along # with logilab-common. If not, see . 
"""Text formatting drivers for ureports""" + +from __future__ import print_function + __docformat__ = "restructuredtext en" +from six.moves import range + from logilab.common.textutils import linesep from logilab.common.ureports import BaseWriter -TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^'] -BULLETS = ['*', '-'] +TITLE_UNDERLINES = [u'', u'=', u'-', u'`', u'.', u'~', u'^'] +BULLETS = [u'*', u'-'] class TextWriter(BaseWriter): """format layouts as text @@ -43,18 +48,18 @@ def visit_section(self, layout): if self.pending_urls: self.writeln() for label, url in self.pending_urls: - self.writeln('.. _`%s`: %s' % (label, url)) + self.writeln(u'.. _`%s`: %s' % (label, url)) self.pending_urls = [] self.section -= 1 self.writeln() def visit_title(self, layout): - title = ''.join(list(self.compute_content(layout))) + title = u''.join(list(self.compute_content(layout))) self.writeln(title) try: self.writeln(TITLE_UNDERLINES[self.section] * len(title)) except IndexError: - print "FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT" + print("FIXME TITLE TOO DEEP. 
TURNING TITLE INTO TEXT") def visit_paragraph(self, layout): """enter a paragraph""" @@ -83,19 +88,19 @@ def visit_table(self, layout): def default_table(self, layout, table_content, cols_width): """format a table""" cols_width = [size+1 for size in cols_width] - format_strings = ' '.join(['%%-%ss'] * len(cols_width)) + format_strings = u' '.join([u'%%-%ss'] * len(cols_width)) format_strings = format_strings % tuple(cols_width) format_strings = format_strings.split(' ') - table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n' - headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n' + table_linesep = u'\n+' + u'+'.join([u'-'*w for w in cols_width]) + u'+\n' + headsep = u'\n+' + u'+'.join([u'='*w for w in cols_width]) + u'+\n' # FIXME: layout.cheaders self.write(table_linesep) for i in range(len(table_content)): - self.write('|') + self.write(u'|') line = table_content[i] for j in range(len(line)): self.write(format_strings[j] % line[j]) - self.write('|') + self.write(u'|') if i == 0 and layout.rheaders: self.write(headsep) else: @@ -104,7 +109,7 @@ def default_table(self, layout, table_content, cols_width): def field_table(self, layout, table_content, cols_width): """special case for field table""" assert layout.cols == 2 - format_string = '%s%%-%ss: %%s' % (linesep, cols_width[0]) + format_string = u'%s%%-%ss: %%s' % (linesep, cols_width[0]) for field, value in table_content: self.write(format_string % (field, value)) @@ -115,14 +120,14 @@ def visit_list(self, layout): indent = ' ' * self.list_level self.list_level += 1 for child in layout.children: - self.write('%s%s%s ' % (linesep, indent, bullet)) + self.write(u'%s%s%s ' % (linesep, indent, bullet)) child.accept(self) self.list_level -= 1 def visit_link(self, layout): """add a hyperlink""" if layout.label != layout.url: - self.write('`%s`_' % layout.label) + self.write(u'`%s`_' % layout.label) self.pending_urls.append( (layout.label, layout.url) ) else: self.write(layout.url) @@ -130,11 
+135,11 @@ def visit_link(self, layout): def visit_verbatimtext(self, layout): """display a verbatim layout as text (so difficult ;) """ - self.writeln('::\n') + self.writeln(u'::\n') for line in layout.data.splitlines(): - self.writeln(' ' + line) + self.writeln(u' ' + line) self.writeln() def visit_text(self, layout): """add some text""" - self.write(layout.data) + self.write(u'%s' % layout.data) diff --git a/pymode/libs/logilab/common/urllib2ext.py b/pymode/libs/logilab/common/urllib2ext.py new file mode 100644 index 00000000..339aec06 --- /dev/null +++ b/pymode/libs/logilab/common/urllib2ext.py @@ -0,0 +1,89 @@ +from __future__ import print_function + +import logging +import urllib2 + +import kerberos as krb + +class GssapiAuthError(Exception): + """raised on error during authentication process""" + +import re +RGX = re.compile('(?:.*,)*\s*Negotiate\s*([^,]*),?', re.I) + +def get_negociate_value(headers): + for authreq in headers.getheaders('www-authenticate'): + match = RGX.search(authreq) + if match: + return match.group(1) + +class HTTPGssapiAuthHandler(urllib2.BaseHandler): + """Negotiate HTTP authentication using context from GSSAPI""" + + handler_order = 400 # before Digest Auth + + def __init__(self): + self._reset() + + def _reset(self): + self._retried = 0 + self._context = None + + def clean_context(self): + if self._context is not None: + krb.authGSSClientClean(self._context) + + def http_error_401(self, req, fp, code, msg, headers): + try: + if self._retried > 5: + raise urllib2.HTTPError(req.get_full_url(), 401, + "negotiate auth failed", headers, None) + self._retried += 1 + logging.debug('gssapi handler, try %s' % self._retried) + negotiate = get_negociate_value(headers) + if negotiate is None: + logging.debug('no negociate found in a www-authenticate header') + return None + logging.debug('HTTPGssapiAuthHandler: negotiate 1 is %r' % negotiate) + result, self._context = krb.authGSSClientInit("HTTP@%s" % req.get_host()) + if result < 1: + raise 
GssapiAuthError("HTTPGssapiAuthHandler: init failed with %d" % result) + result = krb.authGSSClientStep(self._context, negotiate) + if result < 0: + raise GssapiAuthError("HTTPGssapiAuthHandler: step 1 failed with %d" % result) + client_response = krb.authGSSClientResponse(self._context) + logging.debug('HTTPGssapiAuthHandler: client response is %s...' % client_response[:10]) + req.add_unredirected_header('Authorization', "Negotiate %s" % client_response) + server_response = self.parent.open(req) + negotiate = get_negociate_value(server_response.info()) + if negotiate is None: + logging.warning('HTTPGssapiAuthHandler: failed to authenticate server') + else: + logging.debug('HTTPGssapiAuthHandler negotiate 2: %s' % negotiate) + result = krb.authGSSClientStep(self._context, negotiate) + if result < 1: + raise GssapiAuthError("HTTPGssapiAuthHandler: step 2 failed with %d" % result) + return server_response + except GssapiAuthError as exc: + logging.error(repr(exc)) + finally: + self.clean_context() + self._reset() + +if __name__ == '__main__': + import sys + # debug + import httplib + httplib.HTTPConnection.debuglevel = 1 + httplib.HTTPSConnection.debuglevel = 1 + # debug + import logging + logging.basicConfig(level=logging.DEBUG) + # handle cookies + import cookielib + cj = cookielib.CookieJar() + ch = urllib2.HTTPCookieProcessor(cj) + # test with url sys.argv[1] + h = HTTPGssapiAuthHandler() + response = urllib2.build_opener(h, ch).open(sys.argv[1]) + print('\nresponse: %s\n--------------\n' % response.code, response.info()) diff --git a/pymode/libs/logilab/common/vcgutils.py b/pymode/libs/logilab/common/vcgutils.py new file mode 100644 index 00000000..9cd2acda --- /dev/null +++ b/pymode/libs/logilab/common/vcgutils.py @@ -0,0 +1,216 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. 
+# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""Functions to generate files readable with Georg Sander's vcg +(Visualization of Compiler Graphs). + +You can download vcg at http://rw4.cs.uni-sb.de/~sander/html/gshome.html +Note that vcg exists as a debian package. + +See vcg's documentation for explanation about the different values that +maybe used for the functions parameters. + + + + +""" +__docformat__ = "restructuredtext en" + +import string + +ATTRS_VAL = { + 'algos': ('dfs', 'tree', 'minbackward', + 'left_to_right', 'right_to_left', + 'top_to_bottom', 'bottom_to_top', + 'maxdepth', 'maxdepthslow', 'mindepth', 'mindepthslow', + 'mindegree', 'minindegree', 'minoutdegree', + 'maxdegree', 'maxindegree', 'maxoutdegree'), + 'booleans': ('yes', 'no'), + 'colors': ('black', 'white', 'blue', 'red', 'green', 'yellow', + 'magenta', 'lightgrey', + 'cyan', 'darkgrey', 'darkblue', 'darkred', 'darkgreen', + 'darkyellow', 'darkmagenta', 'darkcyan', 'gold', + 'lightblue', 'lightred', 'lightgreen', 'lightyellow', + 'lightmagenta', 'lightcyan', 'lilac', 'turquoise', + 'aquamarine', 'khaki', 'purple', 'yellowgreen', 'pink', + 'orange', 'orchid'), + 'shapes': ('box', 'ellipse', 'rhomb', 'triangle'), + 'textmodes': ('center', 'left_justify', 'right_justify'), + 'arrowstyles': ('solid', 'line', 'none'), + 'linestyles': ('continuous', 'dashed', 'dotted', 'invisible'), + } + +# meaning of 
possible values: +# O -> string +# 1 -> int +# list -> value in list +GRAPH_ATTRS = { + 'title': 0, + 'label': 0, + 'color': ATTRS_VAL['colors'], + 'textcolor': ATTRS_VAL['colors'], + 'bordercolor': ATTRS_VAL['colors'], + 'width': 1, + 'height': 1, + 'borderwidth': 1, + 'textmode': ATTRS_VAL['textmodes'], + 'shape': ATTRS_VAL['shapes'], + 'shrink': 1, + 'stretch': 1, + 'orientation': ATTRS_VAL['algos'], + 'vertical_order': 1, + 'horizontal_order': 1, + 'xspace': 1, + 'yspace': 1, + 'layoutalgorithm': ATTRS_VAL['algos'], + 'late_edge_labels': ATTRS_VAL['booleans'], + 'display_edge_labels': ATTRS_VAL['booleans'], + 'dirty_edge_labels': ATTRS_VAL['booleans'], + 'finetuning': ATTRS_VAL['booleans'], + 'manhattan_edges': ATTRS_VAL['booleans'], + 'smanhattan_edges': ATTRS_VAL['booleans'], + 'port_sharing': ATTRS_VAL['booleans'], + 'edges': ATTRS_VAL['booleans'], + 'nodes': ATTRS_VAL['booleans'], + 'splines': ATTRS_VAL['booleans'], + } +NODE_ATTRS = { + 'title': 0, + 'label': 0, + 'color': ATTRS_VAL['colors'], + 'textcolor': ATTRS_VAL['colors'], + 'bordercolor': ATTRS_VAL['colors'], + 'width': 1, + 'height': 1, + 'borderwidth': 1, + 'textmode': ATTRS_VAL['textmodes'], + 'shape': ATTRS_VAL['shapes'], + 'shrink': 1, + 'stretch': 1, + 'vertical_order': 1, + 'horizontal_order': 1, + } +EDGE_ATTRS = { + 'sourcename': 0, + 'targetname': 0, + 'label': 0, + 'linestyle': ATTRS_VAL['linestyles'], + 'class': 1, + 'thickness': 0, + 'color': ATTRS_VAL['colors'], + 'textcolor': ATTRS_VAL['colors'], + 'arrowcolor': ATTRS_VAL['colors'], + 'backarrowcolor': ATTRS_VAL['colors'], + 'arrowsize': 1, + 'backarrowsize': 1, + 'arrowstyle': ATTRS_VAL['arrowstyles'], + 'backarrowstyle': ATTRS_VAL['arrowstyles'], + 'textmode': ATTRS_VAL['textmodes'], + 'priority': 1, + 'anchor': 1, + 'horizontal_order': 1, + } + + +# Misc utilities ############################################################### + +def latin_to_vcg(st): + """Convert latin characters using vcg escape sequence. 
+ """ + for char in st: + if char not in string.ascii_letters: + try: + num = ord(char) + if num >= 192: + st = st.replace(char, r'\fi%d'%ord(char)) + except: + pass + return st + + +class VCGPrinter: + """A vcg graph writer. + """ + + def __init__(self, output_stream): + self._stream = output_stream + self._indent = '' + + def open_graph(self, **args): + """open a vcg graph + """ + self._stream.write('%sgraph:{\n'%self._indent) + self._inc_indent() + self._write_attributes(GRAPH_ATTRS, **args) + + def close_graph(self): + """close a vcg graph + """ + self._dec_indent() + self._stream.write('%s}\n'%self._indent) + + + def node(self, title, **args): + """draw a node + """ + self._stream.write('%snode: {title:"%s"' % (self._indent, title)) + self._write_attributes(NODE_ATTRS, **args) + self._stream.write('}\n') + + + def edge(self, from_node, to_node, edge_type='', **args): + """draw an edge from a node to another. + """ + self._stream.write( + '%s%sedge: {sourcename:"%s" targetname:"%s"' % ( + self._indent, edge_type, from_node, to_node)) + self._write_attributes(EDGE_ATTRS, **args) + self._stream.write('}\n') + + + # private ################################################################## + + def _write_attributes(self, attributes_dict, **args): + """write graph, node or edge attributes + """ + for key, value in args.items(): + try: + _type = attributes_dict[key] + except KeyError: + raise Exception('''no such attribute %s +possible attributes are %s''' % (key, attributes_dict.keys())) + + if not _type: + self._stream.write('%s%s:"%s"\n' % (self._indent, key, value)) + elif _type == 1: + self._stream.write('%s%s:%s\n' % (self._indent, key, + int(value))) + elif value in _type: + self._stream.write('%s%s:%s\n' % (self._indent, key, value)) + else: + raise Exception('''value %s isn\'t correct for attribute %s +correct values are %s''' % (value, key, _type)) + + def _inc_indent(self): + """increment indentation + """ + self._indent = ' %s' % self._indent + + def 
_dec_indent(self): + """decrement indentation + """ + self._indent = self._indent[:-2] diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/visitor.py b/pymode/libs/logilab/common/visitor.py similarity index 97% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/common/visitor.py rename to pymode/libs/logilab/common/visitor.py index 802d2bef..ed2b70f9 100644 --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/visitor.py +++ b/pymode/libs/logilab/common/visitor.py @@ -35,12 +35,14 @@ def __init__(self, node, list_func, filter_func=None): filter_func = no_filter self._list = list_func(node, filter_func) - def next(self): + def __next__(self): try: return self._list.pop(0) except : return None + next = __next__ + # Base Visitor ################################################################ class Visitor(object): @@ -61,10 +63,10 @@ def visit(self, node, *args, **kargs): def _visit(self, node): iterator = self._get_iterator(node) - n = iterator.next() + n = next(iterator) while n: result = n.accept(self) - n = iterator.next() + n = next(iterator) return result def _get_iterator(self, node): diff --git a/pymode/libs/logilab/common/xmlutils.py b/pymode/libs/logilab/common/xmlutils.py new file mode 100644 index 00000000..d383b9d5 --- /dev/null +++ b/pymode/libs/logilab/common/xmlutils.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of logilab-common. +# +# logilab-common is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. 
+# +# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-common. If not, see . +"""XML utilities. + +This module contains useful functions for parsing and using XML data. For the +moment, there is only one function that can parse the data inside a processing +instruction and return a Python dictionary. + + + + +""" +__docformat__ = "restructuredtext en" + +import re + +RE_DOUBLE_QUOTE = re.compile('([\w\-\.]+)="([^"]+)"') +RE_SIMPLE_QUOTE = re.compile("([\w\-\.]+)='([^']+)'") + +def parse_pi_data(pi_data): + """ + Utility function that parses the data contained in an XML + processing instruction and returns a dictionary of keywords and their + associated values (most of the time, the processing instructions contain + data like ``keyword="value"``, if a keyword is not associated to a value, + for example ``keyword``, it will be associated to ``None``). + + :param pi_data: data contained in an XML processing instruction. + :type pi_data: unicode + + :returns: Dictionary of the keywords (Unicode strings) associated to + their values (Unicode strings) as they were defined in the + data. 
+ :rtype: dict + """ + results = {} + for elt in pi_data.split(): + if RE_DOUBLE_QUOTE.match(elt): + kwd, val = RE_DOUBLE_QUOTE.match(elt).groups() + elif RE_SIMPLE_QUOTE.match(elt): + kwd, val = RE_SIMPLE_QUOTE.match(elt).groups() + else: + kwd, val = elt, None + results[kwd] = val + return results diff --git a/pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth b/pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth new file mode 100644 index 00000000..d268b884 --- /dev/null +++ b/pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('logilab',));ie = os.path.exists(os.path.join(p,'__init__.py'));m = not ie and sys.modules.setdefault('logilab', types.ModuleType('logilab'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst b/pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..6b483af3 --- /dev/null +++ b/pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,153 @@ +Logilab's common library +======================== + +What's this ? +------------- + +This package contains some modules used by different Logilab projects. + +It is released under the GNU Lesser General Public License. + +There is no documentation available yet but the source code should be clean and +well documented. + +Designed to ease: + +* handling command line options and configuration files +* writing interactive command line tools +* manipulation of files and character strings +* manipulation of common structures such as graph, tree, and pattern such as visitor +* generating text and HTML reports +* more... 
+ + +Installation +------------ + +Extract the tarball, jump into the created directory and run :: + + python setup.py install + +For installation options, see :: + + python setup.py install --help + + +Provided modules +---------------- + +Here is a brief description of the available modules. + +Modules providing high-level features +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `cache`, a cache implementation with a least recently used algorithm. + +* `changelog`, a tiny library to manipulate our simplified ChangeLog file format. + +* `clcommands`, high-level classes to define command line programs handling + different subcommands. It is based on `configuration` to get easy command line + / configuration file handling. + +* `configuration`, some classes to handle unified configuration from both + command line (using optparse) and configuration file (using ConfigParser). + +* `proc`, interface to Linux /proc. + +* `umessage`, unicode email support. + +* `ureports`, micro-reports, a way to create simple reports using python objects + without care of the final formatting. ReST and html formatters are provided. + + +Modules providing low-level functions and structures +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `compat`, provides a transparent compatibility layer between different python + versions. + +* `date`, a set of date manipulation functions. + +* `daemon`, a daemon function and mix-in class to properly start an Unix daemon + process. + +* `decorators`, function decorators such as cached, timed... + +* `deprecation`, decorator, metaclass & all to mark functions / classes as + deprecated or moved + +* `fileutils`, some file / file path manipulation utilities. + +* `graph`, graph manipulations functions such as cycle detection, bases for dot + file generation. + +* `modutils`, python module manipulation functions. + +* `shellutils`, some powerful shell like functions to replace shell scripts with + python scripts. 
+ +* `tasksqueue`, a prioritized tasks queue implementation. + +* `textutils`, some text manipulation functions (ansi colorization, line wrapping, + rest support...). + +* `tree`, base class to represent tree structure, and some others to make it + works with the visitor implementation (see below). + +* `visitor`, a generic visitor pattern implementation. + + +Modules extending some standard modules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `debugger`, `pdb` customization. + +* `logging_ext`, extensions to `logging` module such as a colorized formatter + and an easier initialization function. + +* `optik_ext`, defines some new option types (regexp, csv, color, date, etc.) + for `optik` / `optparse` + + +Modules extending some external modules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `sphinx_ext`, Sphinx_ plugin defining a `autodocstring` directive. + +* `vcgutils` , utilities functions to generate file readable with Georg Sander's + vcg tool (Visualization of Compiler Graphs). + + +To be deprecated modules +~~~~~~~~~~~~~~~~~~~~~~~~ + +Those `logilab.common` modules will much probably be deprecated in future +versions: + +* `testlib`: use `unittest2`_ instead +* `pytest`: use `discover`_ instead +* `interface`: use `zope.interface`_ if you really want this +* `table`, `xmlutils`: is that used? +* `sphinxutils`: we won't go that way imo (i == syt) + + +Comments, support, bug reports +------------------------------ + +Project page https://www.logilab.org/project/logilab-common + +Use the python-projects@lists.logilab.org mailing list. + +You can subscribe to this mailing list at +https://lists.logilab.org/mailman/listinfo/python-projects + +Archives are available at +https://lists.logilab.org/pipermail/python-projects/ + + +.. _Sphinx: http://sphinx.pocoo.org/ +.. _`unittest2`: http://pypi.python.org/pypi/unittest2 +.. _`discover`: http://pypi.python.org/pypi/discover +.. 
_`zope.interface`: http://pypi.python.org/pypi/zope.interface + + diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/METADATA b/pymode/libs/logilab_common-1.0.2.dist-info/METADATA new file mode 100644 index 00000000..9a00a498 --- /dev/null +++ b/pymode/libs/logilab_common-1.0.2.dist-info/METADATA @@ -0,0 +1,169 @@ +Metadata-Version: 2.0 +Name: logilab-common +Version: 1.0.2 +Summary: collection of low-level Python packages and modules used by Logilab projects +Home-page: http://www.logilab.org/project/logilab-common +Author: Logilab +Author-email: contact@logilab.fr +License: LGPL +Platform: UNKNOWN +Classifier: Topic :: Utilities +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Requires-Dist: setuptools +Requires-Dist: six (>=1.4.0) + +Logilab's common library +======================== + +What's this ? +------------- + +This package contains some modules used by different Logilab projects. + +It is released under the GNU Lesser General Public License. + +There is no documentation available yet but the source code should be clean and +well documented. + +Designed to ease: + +* handling command line options and configuration files +* writing interactive command line tools +* manipulation of files and character strings +* manipulation of common structures such as graph, tree, and pattern such as visitor +* generating text and HTML reports +* more... + + +Installation +------------ + +Extract the tarball, jump into the created directory and run :: + + python setup.py install + +For installation options, see :: + + python setup.py install --help + + +Provided modules +---------------- + +Here is a brief description of the available modules. + +Modules providing high-level features +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `cache`, a cache implementation with a least recently used algorithm. 
+ +* `changelog`, a tiny library to manipulate our simplified ChangeLog file format. + +* `clcommands`, high-level classes to define command line programs handling + different subcommands. It is based on `configuration` to get easy command line + / configuration file handling. + +* `configuration`, some classes to handle unified configuration from both + command line (using optparse) and configuration file (using ConfigParser). + +* `proc`, interface to Linux /proc. + +* `umessage`, unicode email support. + +* `ureports`, micro-reports, a way to create simple reports using python objects + without care of the final formatting. ReST and html formatters are provided. + + +Modules providing low-level functions and structures +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `compat`, provides a transparent compatibility layer between different python + versions. + +* `date`, a set of date manipulation functions. + +* `daemon`, a daemon function and mix-in class to properly start an Unix daemon + process. + +* `decorators`, function decorators such as cached, timed... + +* `deprecation`, decorator, metaclass & all to mark functions / classes as + deprecated or moved + +* `fileutils`, some file / file path manipulation utilities. + +* `graph`, graph manipulations functions such as cycle detection, bases for dot + file generation. + +* `modutils`, python module manipulation functions. + +* `shellutils`, some powerful shell like functions to replace shell scripts with + python scripts. + +* `tasksqueue`, a prioritized tasks queue implementation. + +* `textutils`, some text manipulation functions (ansi colorization, line wrapping, + rest support...). + +* `tree`, base class to represent tree structure, and some others to make it + works with the visitor implementation (see below). + +* `visitor`, a generic visitor pattern implementation. + + +Modules extending some standard modules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `debugger`, `pdb` customization. 
+ +* `logging_ext`, extensions to `logging` module such as a colorized formatter + and an easier initialization function. + +* `optik_ext`, defines some new option types (regexp, csv, color, date, etc.) + for `optik` / `optparse` + + +Modules extending some external modules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* `sphinx_ext`, Sphinx_ plugin defining a `autodocstring` directive. + +* `vcgutils` , utilities functions to generate file readable with Georg Sander's + vcg tool (Visualization of Compiler Graphs). + + +To be deprecated modules +~~~~~~~~~~~~~~~~~~~~~~~~ + +Those `logilab.common` modules will much probably be deprecated in future +versions: + +* `testlib`: use `unittest2`_ instead +* `pytest`: use `discover`_ instead +* `interface`: use `zope.interface`_ if you really want this +* `table`, `xmlutils`: is that used? +* `sphinxutils`: we won't go that way imo (i == syt) + + +Comments, support, bug reports +------------------------------ + +Project page https://www.logilab.org/project/logilab-common + +Use the python-projects@lists.logilab.org mailing list. + +You can subscribe to this mailing list at +https://lists.logilab.org/mailman/listinfo/python-projects + +Archives are available at +https://lists.logilab.org/pipermail/python-projects/ + + +.. _Sphinx: http://sphinx.pocoo.org/ +.. _`unittest2`: http://pypi.python.org/pypi/unittest2 +.. _`discover`: http://pypi.python.org/pypi/discover +.. 
_`zope.interface`: http://pypi.python.org/pypi/zope.interface + + diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/RECORD b/pymode/libs/logilab_common-1.0.2.dist-info/RECORD new file mode 100644 index 00000000..e6e4730a --- /dev/null +++ b/pymode/libs/logilab_common-1.0.2.dist-info/RECORD @@ -0,0 +1,87 @@ +logilab_common-1.0.2-py2.7-nspkg.pth,sha256=ZY-Jf8tK2WQu_mjLvZuFpvpX9uwdpX3yDS1AuRncCZA,308 +logilab/common/__init__.py,sha256=UiR9rv7f7WsAnIHsxa3UApVCJGTzXbZoC-c4EQJpcvg,5390 +logilab/common/cache.py,sha256=wmY87WSoyERDhAlfIKKUipYavlZPpm3sGAQMpzbDHTM,3621 +logilab/common/changelog.py,sha256=Ea_4j22rWJJ33VSCj4Lz0pBGP0wP7LMP2Zo4DR7iZIo,8075 +logilab/common/clcommands.py,sha256=abMNAsB6ADT7Ns5MsxNtAMOlTQGJLCMO9MUkNYdsVG8,11237 +logilab/common/compat.py,sha256=rMGytWS1DCo35MdKUocU1LfLbZA0RyK79Gyu7lvd6Rg,2593 +logilab/common/configuration.py,sha256=s4rg7Qa1_4bpWlTg-bEaHYUcrgvuoDt75ZJgRnlFsME,42160 +logilab/common/daemon.py,sha256=Eqwo_oKjrHtS9SLrtSfeghRTCjqvveGho43s7vMkd7A,3337 +logilab/common/date.py,sha256=nnUN-4onEaWSR8r4PvtmJyn5ukfFzasjEcOGzEdrvqQ,11230 +logilab/common/debugger.py,sha256=Bw2-yI9KrvSgPLDksda4F8nuK_DvxnSCS-ymPSVc778,7094 +logilab/common/decorators.py,sha256=4DD3iNgEQPVz5hPp-SbbgD-ZObXhaeazGqKleyHdXaw,8868 +logilab/common/deprecation.py,sha256=MAxc_Ds9H_j6C7d4VQqMQPB1j-Ib8vy7iBWoQa8aRHs,7417 +logilab/common/fileutils.py,sha256=kCk_8odmAKnYPHPhUruuV-6og8N9kT8fplV-pvwwd4A,12738 +logilab/common/graph.py,sha256=GTSN-kP40EHjnHXk1vxO-56rEszo-esu1S3hf-SOddw,10247 +logilab/common/interface.py,sha256=dXl6kiuXSpefxauu7J6CUv0soe09wjT4_vXbeWQFgJ8,2593 +logilab/common/logging_ext.py,sha256=Yi8k2fGqr_tt-YApT1JjroNpXETxfj84HKmgTgO22Nw,6975 +logilab/common/modutils.py,sha256=w2LVy_vzhGoyBRrKivx0hqx8n326KrtTUezelEwDAcc,24002 +logilab/common/optik_ext.py,sha256=_aZgWKTKCC8_vYIpstNCOk8wewwZ4jfrpvXWrmPzn5Y,13451 +logilab/common/optparser.py,sha256=QgDoAyVoRy7U1fG9BSZ0O7LQsyNayo1HAelZaKlb4kY,3386 
+logilab/common/proc.py,sha256=RGMlPuc11FfrIsqzqNFO3Q6buqt8dvMwXfXKXfwAHks,9352 +logilab/common/pytest.py,sha256=ac7hVpAb06TstSjPV586h1wW21Y__XH5bjrwX55dDOE,46736 +logilab/common/registry.py,sha256=0qIJfNJiqM1HkI-twKHfXiTPU5HKSGRrS-P0Dsj56qw,41550 +logilab/common/shellutils.py,sha256=ZFZ19eX0TCcDrsbOWiy7sr1oqnhQsLixv9n8HakcJiM,14363 +logilab/common/sphinx_ext.py,sha256=pbKN0ObMDY_jy9ehP_7NOKMo40LbQLjf0xntmxHnGr8,3329 +logilab/common/sphinxutils.py,sha256=piY1R04GNR-i1mIb4PRhbGbmbDZPhDsn1FBAiA_Bbrg,4444 +logilab/common/table.py,sha256=5NEx4Ju-jk2CV6W-jxTpOoYArt2BlRpaTZZUBGwu1kg,31408 +logilab/common/tasksqueue.py,sha256=wFE0C0FiuHGBoCnvU-_Kno1eM_Em6yYxYvND6emRN34,2987 +logilab/common/testlib.py,sha256=2Ra9OPs5QpQv7hoZod3M2yYCUdtqSaN3LAvVyiQyA1k,50506 +logilab/common/textutils.py,sha256=TgPGqkN3JsJuR7VxnkoWaOWfkwHiVNB9gpId_3S2xO4,17277 +logilab/common/tree.py,sha256=Y-sa_pfI17cCb-vkyJMaBW3XKVNrreexBgBMPpQJDy0,10606 +logilab/common/umessage.py,sha256=2BuxspHkPEXhlf-XVDye25Mt0RUELneay-K1KNLcS9c,6551 +logilab/common/urllib2ext.py,sha256=FOpxVrbAPtY_6ssq3Qui3zxzckAqLJe9kGkp8tLR0Ic,3416 +logilab/common/vcgutils.py,sha256=tNfi6jxZ4xdUvrjw1cKOodecRlcD0U3MQvTb5HrY5fE,7673 +logilab/common/visitor.py,sha256=5Oc9Y88Kx4wiZ6JAFYFeXwKrMS8jNph9ENVWG3oim1E,3444 +logilab/common/xmlutils.py,sha256=2e4FM-X1PLKBaTG6etLHsAIrtZQiDEA9U7WqM3KjNks,2273 +logilab/common/ureports/__init__.py,sha256=b3_8f4mAm6T3O_-klutleWZ99XjlR-AELfuLEyCbzQ8,6113 +logilab/common/ureports/docbook_writer.py,sha256=KSkIk0W4C4E6DR-Ul_Y9jgnd4_tgVVu15LnU8p2RoeM,5706 +logilab/common/ureports/html_writer.py,sha256=Ee_x9rXjx2NZp290e-0C7nu7VYuKpkCsrl79m4HLI5g,4956 +logilab/common/ureports/nodes.py,sha256=t2NQiL6LQV94D8ugitklVnZRVbz6kP5QkUrl8zGsmMQ,5838 +logilab/common/ureports/text_writer.py,sha256=cMBHbA36_1NrKKnx5LBKczGQmBRg4aObkpr1d581ORU,5212 +../../bin/pytest,sha256=vkYcOC21mDzGBrz4-ajilr8TGxa9tRabxQhyYyXeEDE,124 +logilab_common-1.0.2.dist-info/DESCRIPTION.rst,sha256=bMLyPRBRS-tSzW5zhchxcLlPbYHRv0XEMqs6Oln2z5U,4426 
+logilab_common-1.0.2.dist-info/METADATA,sha256=3_iFYhN84fXSjkdjzHv3grHBY2xIZVLSkmuBeTSnLQE,4934 +logilab_common-1.0.2.dist-info/metadata.json,sha256=dTwpZUieC7dZFkKiNdtgVExm2w1B44k4ZDSaCP3ASXo,742 +logilab_common-1.0.2.dist-info/namespace_packages.txt,sha256=xXemaIbd-285ANf3yiCDkMHRTZSuLvlqL_MTLEJKMuk,8 +logilab_common-1.0.2.dist-info/RECORD,, +logilab_common-1.0.2.dist-info/top_level.txt,sha256=xXemaIbd-285ANf3yiCDkMHRTZSuLvlqL_MTLEJKMuk,8 +logilab_common-1.0.2.dist-info/WHEEL,sha256=54bVun1KfEBTJ68SHUmbxNPj80VxlQ0sHi4gZdGZXEY,92 +logilab/common/logging_ext.pyc,, +logilab/common/date.pyc,, +logilab/common/modutils.pyc,, +logilab/common/ureports/__init__.pyc,, +logilab/common/sphinxutils.pyc,, +logilab/common/ureports/text_writer.pyc,, +logilab/common/optik_ext.pyc,, +logilab/common/visitor.pyc,, +logilab/common/debugger.pyc,, +logilab/common/compat.pyc,, +logilab/common/decorators.pyc,, +logilab/common/textutils.pyc,, +logilab/common/ureports/docbook_writer.pyc,, +logilab/common/shellutils.pyc,, +logilab/common/changelog.pyc,, +logilab/common/interface.pyc,, +logilab/common/ureports/nodes.pyc,, +logilab/common/pytest.pyc,, +logilab/common/sphinx_ext.pyc,, +logilab/common/xmlutils.pyc,, +logilab/common/__init__.pyc,, +logilab/common/tree.pyc,, +logilab/common/umessage.pyc,, +logilab/common/registry.pyc,, +logilab/common/proc.pyc,, +logilab/common/urllib2ext.pyc,, +logilab/common/testlib.pyc,, +logilab/common/clcommands.pyc,, +logilab/common/ureports/html_writer.pyc,, +logilab/common/vcgutils.pyc,, +logilab/common/daemon.pyc,, +logilab/common/table.pyc,, +logilab/common/optparser.pyc,, +logilab/common/deprecation.pyc,, +logilab/common/tasksqueue.pyc,, +logilab/common/fileutils.pyc,, +logilab/common/graph.pyc,, +logilab/common/cache.pyc,, +logilab/common/configuration.pyc,, diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL b/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL new file mode 100644 index 00000000..45a0cd88 --- /dev/null +++ 
b/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: true +Tag: py2-none-any + diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/metadata.json b/pymode/libs/logilab_common-1.0.2.dist-info/metadata.json new file mode 100644 index 00000000..54212666 --- /dev/null +++ b/pymode/libs/logilab_common-1.0.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "LGPL", "name": "logilab-common", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "test_requires": [{"requires": ["pytz"]}], "summary": "collection of low-level Python packages and modules used by Logilab projects", "run_requires": [{"requires": ["setuptools", "six (>=1.4.0)"]}], "version": "1.0.2", "extensions": {"python.details": {"project_urls": {"Home": "http://www.logilab.org/project/logilab-common"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "contact@logilab.fr", "name": "Logilab"}]}}, "classifiers": ["Topic :: Utilities", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3"], "extras": []} \ No newline at end of file diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt b/pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt new file mode 100644 index 00000000..3ac267a9 --- /dev/null +++ b/pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt @@ -0,0 +1 @@ +logilab diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt b/pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt new file mode 100644 index 00000000..3ac267a9 --- /dev/null +++ b/pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +logilab diff --git a/pymode/libs/pylama/lint/pylama_mccabe/mccabe.py b/pymode/libs/mccabe.py similarity index 86% rename from pymode/libs/pylama/lint/pylama_mccabe/mccabe.py rename to pymode/libs/mccabe.py index 
82bdf5f0..90bf19cd 100644 --- a/pymode/libs/pylama/lint/pylama_mccabe/mccabe.py +++ b/pymode/libs/mccabe.py @@ -14,7 +14,7 @@ except ImportError: # Python 2.5 from flake8.util import ast, iter_child_nodes -__version__ = '0.2.1' +__version__ = '0.3.1' class ASTVisitor(object): @@ -67,6 +67,8 @@ def __init__(self, name, entity, lineno): def connect(self, n1, n2): self.nodes[n1].append(n2) + # Ensure that the destination node is always counted. + self.nodes[n2] = [] def to_dot(self): print('subgraph {') @@ -160,34 +162,37 @@ def visitSimpleStatement(self, node): def visitLoop(self, node): name = "Loop %d" % node.lineno + self._subgraph(node, name) + visitFor = visitWhile = visitLoop + + def visitIf(self, node): + name = "If %d" % node.lineno + self._subgraph(node, name) + + def _subgraph(self, node, name, extra_blocks=()): + """create the subgraphs representing any `if` and `for` statements""" if self.graph is None: # global loop self.graph = PathGraph(name, name, node.lineno) pathnode = PathNode(name) - self.tail = pathnode - self.dispatch_list(node.body) + self._subgraph_parse(node, pathnode, extra_blocks) self.graphs["%s%s" % (self.classname, name)] = self.graph self.reset() else: pathnode = self.appendPathNode(name) - self.tail = pathnode - self.dispatch_list(node.body) - bottom = PathNode("", look='point') - self.graph.connect(self.tail, bottom) - self.graph.connect(pathnode, bottom) - self.tail = bottom - - # TODO: else clause in node.orelse + self._subgraph_parse(node, pathnode, extra_blocks) - visitFor = visitWhile = visitLoop - - def visitIf(self, node): - name = "If %d" % node.lineno - pathnode = self.appendPathNode(name) + def _subgraph_parse(self, node, pathnode, extra_blocks): + """parse the body and any `else` block of `if` and `for` statements""" loose_ends = [] + self.tail = pathnode self.dispatch_list(node.body) loose_ends.append(self.tail) + for extra in extra_blocks: + self.tail = pathnode + self.dispatch_list(extra.body) + 
loose_ends.append(self.tail) if node.orelse: self.tail = pathnode self.dispatch_list(node.orelse) @@ -202,19 +207,9 @@ def visitIf(self, node): def visitTryExcept(self, node): name = "TryExcept %d" % node.lineno - pathnode = self.appendPathNode(name) - loose_ends = [] - self.dispatch_list(node.body) - loose_ends.append(self.tail) - for handler in node.handlers: - self.tail = pathnode - self.dispatch_list(handler.body) - loose_ends.append(self.tail) - if pathnode: - bottom = PathNode("", look='point') - for le in loose_ends: - self.graph.connect(le, bottom) - self.tail = bottom + self._subgraph(node, name, extra_blocks=node.handlers) + + visitTry = visitTryExcept def visitWith(self, node): name = "With %d" % node.lineno @@ -241,7 +236,7 @@ def add_options(cls, parser): @classmethod def parse_options(cls, options): - cls.max_complexity = options.max_complexity + cls.max_complexity = int(options.max_complexity) def run(self): if self.max_complexity < 0: @@ -249,7 +244,7 @@ def run(self): visitor = PathGraphingAstVisitor() visitor.preorder(self.tree, visitor) for graph in visitor.graphs.values(): - if graph.complexity() >= self.max_complexity: + if graph.complexity() > self.max_complexity: text = self._error_tmpl % (graph.entity, graph.complexity()) yield graph.lineno, 0, text, type(self) @@ -265,13 +260,12 @@ def get_code_complexity(code, threshold=7, filename='stdin'): complx = [] McCabeChecker.max_complexity = threshold for lineno, offset, text, check in McCabeChecker(tree, filename).run(): - complx.append(dict( - type=McCabeChecker._code, - lnum=lineno, - text=text, - )) + complx.append('%s:%d:1: %s' % (filename, lineno, text)) - return complx + if len(complx) == 0: + return 0 + print('\n'.join(complx)) + return len(complx) def get_module_complexity(module_path, threshold=7): @@ -281,13 +275,15 @@ def get_module_complexity(module_path, threshold=7): return get_code_complexity(code, threshold, filename=module_path) -def main(argv): +def main(argv=None): + if argv is 
None: + argv = sys.argv[1:] opar = optparse.OptionParser() opar.add_option("-d", "--dot", dest="dot", help="output a graphviz dot file", action="store_true") opar.add_option("-m", "--min", dest="threshold", help="minimum complexity for output", type="int", - default=2) + default=1) options, args = opar.parse_args(argv) @@ -300,7 +296,8 @@ def main(argv): if options.dot: print('graph {') for graph in visitor.graphs.values(): - if graph.complexity() >= options.threshold: + if (not options.threshold or + graph.complexity() >= options.threshold): graph.to_dot() print('}') else: @@ -311,3 +308,4 @@ def main(argv): if __name__ == '__main__': main(sys.argv[1:]) + diff --git a/pymode/libs/pep257.py b/pymode/libs/pep257.py new file mode 100644 index 00000000..79d9eee1 --- /dev/null +++ b/pymode/libs/pep257.py @@ -0,0 +1,1187 @@ +#! /usr/bin/env python +"""Static analysis tool for checking docstring conventions and style. + +Implemented checks cover PEP257: +http://www.python.org/dev/peps/pep-0257/ + +Other checks can be added, e.g. 
NumPy docstring conventions: +https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt + +The repository is located at: +http://github.com/GreenSteam/pep257 + +""" +from __future__ import with_statement + +import os +import sys +import logging +import tokenize as tk +from itertools import takewhile, dropwhile, chain +from optparse import OptionParser +from re import compile as re +import itertools + +try: # Python 3.x + from ConfigParser import RawConfigParser +except ImportError: # Python 2.x + from configparser import RawConfigParser + +log = logging.getLogger(__name__) + + +try: + from StringIO import StringIO +except ImportError: # Python 3.0 and later + from io import StringIO + + +try: + next +except NameError: # Python 2.5 and earlier + nothing = object() + + def next(obj, default=nothing): + if default == nothing: + return obj.next() + else: + try: + return obj.next() + except StopIteration: + return default + + +# If possible (python >= 3.2) use tokenize.open to open files, so PEP 263 +# encoding markers are interpreted. 
+try: + tokenize_open = tk.open +except AttributeError: + tokenize_open = open + + +__version__ = '0.6.1-alpha' +__all__ = ('check', 'collect') + +PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep257') +NO_VIOLATIONS_RETURN_CODE = 0 +VIOLATIONS_RETURN_CODE = 1 +INVALID_OPTIONS_RETURN_CODE = 2 + + +def humanize(string): + return re(r'(.)([A-Z]+)').sub(r'\1 \2', string).lower() + + +def is_magic(name): + return name.startswith('__') and name.endswith('__') + + +def is_ascii(string): + return all(ord(char) < 128 for char in string) + + +def is_blank(string): + return not string.strip() + + +def leading_space(string): + return re('\s*').match(string).group() + + +class Value(object): + + def __init__(self, *args): + vars(self).update(zip(self._fields, args)) + + def __hash__(self): + return hash(repr(self)) + + def __eq__(self, other): + return other and vars(self) == vars(other) + + def __repr__(self): + kwargs = ', '.join('{}={!r}'.format(field, getattr(self, field)) + for field in self._fields) + return '{}({})'.format(self.__class__.__name__, kwargs) + + +class Definition(Value): + + _fields = ('name', '_source', 'start', 'end', 'decorators', 'docstring', + 'children', 'parent') + + _human = property(lambda self: humanize(type(self).__name__)) + kind = property(lambda self: self._human.split()[-1]) + module = property(lambda self: self.parent.module) + all = property(lambda self: self.module.all) + _slice = property(lambda self: slice(self.start - 1, self.end)) + source = property(lambda self: ''.join(self._source[self._slice])) + + def __iter__(self): + return chain([self], *self.children) + + @property + def _publicity(self): + return {True: 'public', False: 'private'}[self.is_public] + + def __str__(self): + return 'in %s %s `%s`' % (self._publicity, self._human, self.name) + + +class Module(Definition): + + _fields = ('name', '_source', 'start', 'end', 'decorators', 'docstring', + 'children', 'parent', '_all') + is_public = True + _nest = staticmethod(lambda s: 
{'def': Function, 'class': Class}[s]) + module = property(lambda self: self) + all = property(lambda self: self._all) + + def __str__(self): + return 'at module level' + + +class Package(Module): + + """A package is a __init__.py module.""" + + +class Function(Definition): + + _nest = staticmethod(lambda s: {'def': NestedFunction, + 'class': NestedClass}[s]) + + @property + def is_public(self): + if self.all is not None: + return self.name in self.all + else: # TODO: are there any magic functions? not methods + return not self.name.startswith('_') or is_magic(self.name) + + +class NestedFunction(Function): + + is_public = False + + +class Method(Function): + + @property + def is_public(self): + # Check if we are a setter/deleter method, and mark as private if so. + for decorator in self.decorators: + # Given 'foo', match 'foo.bar' but not 'foobar' or 'sfoo' + if re(r"^{0}\.".format(self.name)).match(decorator.name): + return False + name_is_public = not self.name.startswith('_') or is_magic(self.name) + return self.parent.is_public and name_is_public + + +class Class(Definition): + + _nest = staticmethod(lambda s: {'def': Method, 'class': NestedClass}[s]) + is_public = Function.is_public + + +class NestedClass(Class): + + is_public = False + + +class Decorator(Value): + + """A decorator for function, method or class.""" + + _fields = 'name arguments'.split() + + +class TokenKind(int): + def __repr__(self): + return "tk.{}".format(tk.tok_name[self]) + + +class Token(Value): + + _fields = 'kind value start end source'.split() + + def __init__(self, *args): + super(Token, self).__init__(*args) + self.kind = TokenKind(self.kind) + + +class TokenStream(object): + + def __init__(self, filelike): + self._generator = tk.generate_tokens(filelike.readline) + self.current = Token(*next(self._generator, None)) + self.line = self.current.start[0] + + def move(self): + previous = self.current + current = next(self._generator, None) + self.current = None if current is None else 
Token(*current) + self.line = self.current.start[0] if self.current else self.line + return previous + + def __iter__(self): + while True: + if self.current is not None: + yield self.current + else: + return + self.move() + + +class AllError(Exception): + + def __init__(self, message): + Exception.__init__( + self, message + + 'That means pep257 cannot decide which definitions are public. ' + 'Variable __all__ should be present at most once in each file, ' + "in form `__all__ = ('a_public_function', 'APublicClass', ...)`. " + 'More info on __all__: http://stackoverflow.com/q/44834/. ') + + +class Parser(object): + + def __call__(self, filelike, filename): + self.source = filelike.readlines() + src = ''.join(self.source) + self.stream = TokenStream(StringIO(src)) + self.filename = filename + self.all = None + self._accumulated_decorators = [] + return self.parse_module() + + current = property(lambda self: self.stream.current) + line = property(lambda self: self.stream.line) + + def consume(self, kind): + assert self.stream.move().kind == kind + + def leapfrog(self, kind, value=None): + """Skip tokens in the stream until a certain token kind is reached. + + If `value` is specified, tokens whose values are different will also + be skipped. + """ + while self.current is not None: + if (self.current.kind == kind and + (value is None or self.current.value == value)): + self.consume(kind) + return + self.stream.move() + + def parse_docstring(self): + """Parse a single docstring and return its value.""" + log.debug("parsing docstring, token is %r (%s)", + self.current.kind, self.current.value) + while self.current.kind in (tk.COMMENT, tk.NEWLINE, tk.NL): + self.stream.move() + log.debug("parsing docstring, token is %r (%s)", + self.current.kind, self.current.value) + if self.current.kind == tk.STRING: + docstring = self.current.value + self.stream.move() + return docstring + return None + + def parse_decorators(self): + """Called after first @ is found. 
+ + Parse decorators into self._accumulated_decorators. + Continue to do so until encountering the 'def' or 'class' start token. + """ + name = [] + arguments = [] + at_arguments = False + + while self.current is not None: + if (self.current.kind == tk.NAME and + self.current.value in ['def', 'class']): + # Done with decorators - found function or class proper + break + elif self.current.kind == tk.OP and self.current.value == '@': + # New decorator found. Store the decorator accumulated so far: + self._accumulated_decorators.append( + Decorator(''.join(name), ''.join(arguments))) + # Now reset to begin accumulating the new decorator: + name = [] + arguments = [] + at_arguments = False + elif self.current.kind == tk.OP and self.current.value == '(': + at_arguments = True + elif self.current.kind == tk.OP and self.current.value == ')': + # Ignore close parenthesis + pass + elif self.current.kind == tk.NEWLINE or self.current.kind == tk.NL: + # Ignore newlines + pass + else: + # Keep accumulating current decorator's name or argument. 
+ if not at_arguments: + name.append(self.current.value) + else: + arguments.append(self.current.value) + self.stream.move() + + # Add decorator accumulated so far + self._accumulated_decorators.append( + Decorator(''.join(name), ''.join(arguments))) + + def parse_definitions(self, class_, all=False): + """Parse multiple defintions and yield them.""" + while self.current is not None: + log.debug("parsing defintion list, current token is %r (%s)", + self.current.kind, self.current.value) + if all and self.current.value == '__all__': + self.parse_all() + elif self.current.kind == tk.OP and self.current.value == '@': + self.consume(tk.OP) + self.parse_decorators() + elif self.current.value in ['def', 'class']: + yield self.parse_definition(class_._nest(self.current.value)) + elif self.current.kind == tk.INDENT: + self.consume(tk.INDENT) + for definition in self.parse_definitions(class_): + yield definition + elif self.current.kind == tk.DEDENT: + self.consume(tk.DEDENT) + return + else: + self.stream.move() + + def parse_all(self): + """Parse the __all__ definition in a module.""" + assert self.current.value == '__all__' + self.consume(tk.NAME) + if self.current.value != '=': + raise AllError('Could not evaluate contents of __all__. ') + self.consume(tk.OP) + if self.current.value not in '([': + raise AllError('Could not evaluate contents of __all__. ') + if self.current.value == '[': + msg = ("%s WARNING: __all__ is defined as a list, this means " + "pep257 cannot reliably detect contents of the __all__ " + "variable, because it can be mutated. Change __all__ to be " + "an (immutable) tuple, to remove this warning. Note, " + "pep257 uses __all__ to detect which definitions are " + "public, to warn if public definitions are missing " + "docstrings. If __all__ is a (mutable) list, pep257 cannot " + "reliably assume its contents. 
pep257 will proceed " + "assuming __all__ is not mutated.\n" % self.filename) + sys.stderr.write(msg) + self.consume(tk.OP) + + self.all = [] + all_content = "(" + while self.current.kind != tk.OP or self.current.value not in ")]": + if self.current.kind in (tk.NL, tk.COMMENT): + pass + elif (self.current.kind == tk.STRING or + self.current.value == ','): + all_content += self.current.value + else: + kind = token.tok_name[self.current.kind] + raise AllError('Unexpected token kind in __all__: %s' % kind) + self.stream.move() + self.consume(tk.OP) + all_content += ")" + try: + self.all = eval(all_content, {}) + except BaseException as e: + raise AllError('Could not evaluate contents of __all__.' + '\bThe value was %s. The exception was:\n%s' + % (all_content, e)) + + def parse_module(self): + """Parse a module (and its children) and return a Module object.""" + log.debug("parsing module.") + start = self.line + docstring = self.parse_docstring() + children = list(self.parse_definitions(Module, all=True)) + assert self.current is None, self.current + end = self.line + cls = Module + if self.filename.endswith('__init__.py'): + cls = Package + module = cls(self.filename, self.source, start, end, + [], docstring, children, None, self.all) + for child in module.children: + child.parent = module + log.debug("finished parsing module.") + return module + + def parse_definition(self, class_): + """Parse a defintion and return its value in a `class_` object.""" + start = self.line + self.consume(tk.NAME) + name = self.current.value + log.debug("parsing %s '%s'", class_.__name__, name) + self.stream.move() + if self.current.kind == tk.OP and self.current.value == '(': + parenthesis_level = 0 + while True: + if self.current.kind == tk.OP: + if self.current.value == '(': + parenthesis_level += 1 + elif self.current.value == ')': + parenthesis_level -= 1 + if parenthesis_level == 0: + break + self.stream.move() + if self.current.kind != tk.OP or self.current.value != ':': + 
self.leapfrog(tk.OP, value=":") + else: + self.consume(tk.OP) + if self.current.kind in (tk.NEWLINE, tk.COMMENT): + self.leapfrog(tk.INDENT) + assert self.current.kind != tk.INDENT + docstring = self.parse_docstring() + decorators = self._accumulated_decorators + self._accumulated_decorators = [] + log.debug("parsing nested defintions.") + children = list(self.parse_definitions(class_)) + log.debug("finished parsing nested defintions for '%s'", name) + end = self.line - 1 + else: # one-liner definition + docstring = self.parse_docstring() + decorators = [] # TODO + children = [] + end = self.line + self.leapfrog(tk.NEWLINE) + definition = class_(name, self.source, start, end, + decorators, docstring, children, None) + for child in definition.children: + child.parent = definition + log.debug("finished parsing %s '%s'. Next token is %r (%s)", + class_.__name__, name, self.current.kind, + self.current.value) + return definition + + +class Error(object): + + """Error in docstring style.""" + + # should be overridden by inheriting classes + code = None + short_desc = None + context = None + + # Options that define how errors are printed: + explain = False + source = False + + def __init__(self, *parameters): + self.parameters = parameters + self.definition = None + self.explanation = None + + def set_context(self, definition, explanation): + self.definition = definition + self.explanation = explanation + + filename = property(lambda self: self.definition.module.name) + line = property(lambda self: self.definition.start) + + @property + def message(self): + ret = '%s: %s' % (self.code, self.short_desc) + if self.context is not None: + ret += ' (' + self.context % self.parameters + ')' + return ret + + @property + def lines(self): + source = '' + lines = self.definition._source[self.definition._slice] + offset = self.definition.start + lines_stripped = list(reversed(list(dropwhile(is_blank, + reversed(lines))))) + numbers_width = 0 + for n, line in 
enumerate(lines_stripped): + numbers_width = max(numbers_width, n + offset) + numbers_width = len(str(numbers_width)) + numbers_width = 6 + for n, line in enumerate(lines_stripped): + source += '%*d: %s' % (numbers_width, n + offset, line) + if n > 5: + source += ' ...\n' + break + return source + + def __str__(self): + self.explanation = '\n'.join(l for l in self.explanation.split('\n') + if not is_blank(l)) + template = '%(filename)s:%(line)s %(definition)s:\n %(message)s' + if self.source and self.explain: + template += '\n\n%(explanation)s\n\n%(lines)s\n' + elif self.source and not self.explain: + template += '\n\n%(lines)s\n' + elif self.explain and not self.source: + template += '\n\n%(explanation)s\n\n' + return template % dict((name, getattr(self, name)) for name in + ['filename', 'line', 'definition', 'message', + 'explanation', 'lines']) + + __repr__ = __str__ + + def __lt__(self, other): + return (self.filename, self.line) < (other.filename, other.line) + + +class ErrorRegistry(object): + groups = [] + + class ErrorGroup(object): + + def __init__(self, prefix, name): + self.prefix = prefix + self.name = name + self.errors = [] + + def create_error(self, error_code, error_desc, error_context=None): + # TODO: check prefix + + class _Error(Error): + code = error_code + short_desc = error_desc + context = error_context + + self.errors.append(_Error) + return _Error + + @classmethod + def create_group(cls, prefix, name): + group = cls.ErrorGroup(prefix, name) + cls.groups.append(group) + return group + + @classmethod + def get_error_codes(cls): + for group in cls.groups: + for error in group.errors: + yield error.code + + @classmethod + def to_rst(cls): + sep_line = '+' + 6 * '-' + '+' + '-' * 71 + '+\n' + blank_line = '|' + 78 * ' ' + '|\n' + table = '' + for group in cls.groups: + table += sep_line + table += blank_line + table += '|' + ('**%s**' % group.name).center(78) + '|\n' + table += blank_line + for error in group.errors: + table += sep_line + table 
+= ('|' + error.code.center(6) + '| ' + + error.short_desc.ljust(70) + '|\n') + table += sep_line + return table + + +D1xx = ErrorRegistry.create_group('D1', 'Missing Docstrings') +D100 = D1xx.create_error('D100', 'Missing docstring in public module') +D101 = D1xx.create_error('D101', 'Missing docstring in public class') +D102 = D1xx.create_error('D102', 'Missing docstring in public method') +D103 = D1xx.create_error('D103', 'Missing docstring in public function') +D104 = D1xx.create_error('D104', 'Missing docstring in public package') + +D2xx = ErrorRegistry.create_group('D2', 'Whitespace Issues') +D200 = D2xx.create_error('D200', 'One-line docstring should fit on one line ' + 'with quotes', 'found %s') +D201 = D2xx.create_error('D201', 'No blank lines allowed before function ' + 'docstring', 'found %s') +D202 = D2xx.create_error('D202', 'No blank lines allowed after function ' + 'docstring', 'found %s') +D203 = D2xx.create_error('D203', '1 blank line required before class ' + 'docstring', 'found %s') +D204 = D2xx.create_error('D204', '1 blank line required after class ' + 'docstring', 'found %s') +D205 = D2xx.create_error('D205', '1 blank line required between summary line ' + 'and description', 'found %s') +D206 = D2xx.create_error('D206', 'Docstring should be indented with spaces, ' + 'not tabs') +D207 = D2xx.create_error('D207', 'Docstring is under-indented') +D208 = D2xx.create_error('D208', 'Docstring is over-indented') +D209 = D2xx.create_error('D209', 'Multi-line docstring closing quotes should ' + 'be on a separate line') +D210 = D2xx.create_error('D210', 'No whitespaces allowed surrounding ' + 'docstring text') + +D3xx = ErrorRegistry.create_group('D3', 'Quotes Issues') +D300 = D3xx.create_error('D300', 'Use """triple double quotes"""', + 'found %s-quotes') +D301 = D3xx.create_error('D301', 'Use r""" if any backslashes in a docstring') +D302 = D3xx.create_error('D302', 'Use u""" for Unicode docstrings') + +D4xx = ErrorRegistry.create_group('D4', 
'Docstring Content Issues') +D400 = D4xx.create_error('D400', 'First line should end with a period', + 'not %r') +D401 = D4xx.create_error('D401', 'First line should be in imperative mood', + '%r, not %r') +D402 = D4xx.create_error('D402', 'First line should not be the function\'s ' + '"signature"') + + +class Conventions(object): + pep257 = set(ErrorRegistry.get_error_codes()) + + +def get_option_parser(): + parser = OptionParser(version=__version__, + usage='Usage: pep257 [options] [...]') + parser.config_options = ('explain', 'source', 'ignore', 'match', 'select', + 'match-dir', 'debug', 'verbose', 'count', + 'convention') + option = parser.add_option + option('-e', '--explain', action='store_true', + help='show explanation of each error') + option('-s', '--source', action='store_true', + help='show source for each error') + option('--select', metavar='', default='', + help='choose the basic list of checked errors by specifying which ' + 'errors to check for (with a list of comma-separated error ' + 'codes). for example: --select=D101,D202') + option('--ignore', metavar='', default='', + help='choose the basic list of checked errors by specifying which ' + 'errors to ignore (with a list of comma-separated error ' + 'codes). for example: --ignore=D101,D202') + option('--convention', metavar='', default='', + help='choose the basic list of checked errors by specifying an ' + 'existing convention. 
for example: --convention=pep257') + option('--add-select', metavar='', default='', + help='amend the list of errors to check for by specifying more ' + 'error codes to check.') + option('--add-ignore', metavar='', default='', + help='amend the list of errors to check for by specifying more ' + 'error codes to ignore.') + option('--match', metavar='', default='(?!test_).*\.py', + help="check only files that exactly match regular " + "expression; default is --match='(?!test_).*\.py' which " + "matches files that don't start with 'test_' but end with " + "'.py'") + option('--match-dir', metavar='', default='[^\.].*', + help="search only dirs that exactly match regular " + "expression; default is --match-dir='[^\.].*', which matches " + "all dirs that don't start with a dot") + option('-d', '--debug', action='store_true', + help='print debug information') + option('-v', '--verbose', action='store_true', + help='print status information') + option('--count', action='store_true', + help='print total number of errors to stdout') + return parser + + +def collect(names, match=lambda name: True, match_dir=lambda name: True): + """Walk dir trees under `names` and generate filnames that `match`. + + Example + ------- + >>> sorted(collect(['non-dir.txt', './'], + ... match=lambda name: name.endswith('.py'))) + ['non-dir.txt', './pep257.py', './setup.py', './test_pep257.py'] + + """ + for name in names: # map(expanduser, names): + if os.path.isdir(name): + for root, dirs, filenames in os.walk(name): + # Skip any dirs that do not match match_dir + dirs[:] = [dir for dir in dirs if match_dir(dir)] + for filename in filenames: + if match(filename): + yield os.path.join(root, filename) + else: + yield name + + +def check(filenames, select=None, ignore=None): + """Generate PEP 257 errors that exist in `filenames` iterable. + + Only returns errors with error-codes defined in `checked_codes` iterable. 
+ + Example + ------- + >>> check(['pep257.py'], checked_codes=['D100']) + + + """ + if select and ignore: + raise ValueError('Cannot pass both select and ignore. They are ' + 'mutually exclusive.') + elif select or ignore: + checked_codes = (select or + set(ErrorRegistry.get_error_codes()) - set(ignore)) + else: + checked_codes = Conventions.pep257 + + for filename in filenames: + log.info('Checking file %s.', filename) + try: + with tokenize_open(filename) as file: + source = file.read() + for error in PEP257Checker().check_source(source, filename): + code = getattr(error, 'code', None) + if code in checked_codes: + yield error + except (EnvironmentError, AllError): + yield sys.exc_info()[1] + except tk.TokenError: + yield SyntaxError('invalid syntax in file %s' % filename) + + +def get_options(args, opt_parser): + config = RawConfigParser() + parent = tail = os.path.abspath(os.path.commonprefix(args)) + config_found = False + while tail and not config_found: + log.info(tail) + for fn in PROJECT_CONFIG: + full_path = os.path.join(parent, fn) + if config.read(full_path): + log.info('local configuration: in %s.', full_path) + config_found = True + break + parent, tail = os.path.split(parent) + + new_options = None + if config.has_section('pep257'): + option_list = dict([(o.dest, o.type or o.action) + for o in opt_parser.option_list]) + + # First, read the default values + new_options, _ = opt_parser.parse_args([]) + + # Second, parse the configuration + pep257_section = 'pep257' + for opt in config.options(pep257_section): + if opt.replace('_', '-') not in opt_parser.config_options: + log.warning("Unknown option '{}' ignored".format(opt)) + continue + normalized_opt = opt.replace('-', '_') + opt_type = option_list[normalized_opt] + if opt_type in ('int', 'count'): + value = config.getint(pep257_section, opt) + elif opt_type == 'string': + value = config.get(pep257_section, opt) + else: + assert opt_type in ('store_true', 'store_false') + value = 
config.getboolean(pep257_section, opt) + setattr(new_options, normalized_opt, value) + + # Third, overwrite with the command-line options + options, _ = opt_parser.parse_args(values=new_options) + log.debug("options: %s", options) + return options + + +def setup_stream_handlers(options): + """Setup logging stream handlers according to the options.""" + class StdoutFilter(logging.Filter): + def filter(self, record): + return record.levelno in (logging.DEBUG, logging.INFO) + + if log.handlers: + for handler in log.handlers: + log.removeHandler(handler) + + stdout_handler = logging.StreamHandler(sys.stdout) + stdout_handler.setLevel(logging.WARNING) + stdout_handler.addFilter(StdoutFilter()) + if options.debug: + stdout_handler.setLevel(logging.DEBUG) + elif options.verbose: + stdout_handler.setLevel(logging.INFO) + else: + stdout_handler.setLevel(logging.WARNING) + log.addHandler(stdout_handler) + + stderr_handler = logging.StreamHandler(sys.stderr) + stderr_handler.setLevel(logging.WARNING) + log.addHandler(stderr_handler) + + +def get_checked_error_codes(options): + codes = set(ErrorRegistry.get_error_codes()) + if options.ignore: + checked_codes = codes - set(options.ignore.split(',')) + elif options.select: + checked_codes = set(options.select.split(',')) + elif options.convention: + checked_codes = getattr(Conventions, options.convention) + else: + checked_codes = Conventions.pep257 + checked_codes -= set(options.add_ignore.split(',')) + checked_codes |= set(options.add_select.split(',')) + return checked_codes - set('') + + +def validate_options(options): + mutually_exclusive = ('ignore', 'select', 'convention') + for opt1, opt2 in itertools.permutations(mutually_exclusive, 2): + if getattr(options, opt1) and getattr(options, opt2): + log.error('Cannot pass both {0} and {1}. 
They are ' + 'mutually exclusive.'.format(opt1, opt2)) + return False + if options.convention and not hasattr(Conventions, options.convention): + return False + return True + + +def run_pep257(): + log.setLevel(logging.DEBUG) + opt_parser = get_option_parser() + # setup the logger before parsing the config file, so that command line + # arguments for debug / verbose will be printed. + options, arguments = opt_parser.parse_args() + setup_stream_handlers(options) + # We parse the files before opening the config file, since it changes where + # we look for the file. + options = get_options(arguments, opt_parser) + if not validate_options(options): + return INVALID_OPTIONS_RETURN_CODE + # Setup the handler again with values from the config file. + setup_stream_handlers(options) + + collected = collect(arguments or ['.'], + match=re(options.match + '$').match, + match_dir=re(options.match_dir + '$').match) + + log.debug("starting pep257 in debug mode.") + + Error.explain = options.explain + Error.source = options.source + collected = list(collected) + checked_codes = get_checked_error_codes(options) + errors = check(collected, select=checked_codes) + code = NO_VIOLATIONS_RETURN_CODE + count = 0 + for error in errors: + sys.stderr.write('%s\n' % error) + code = VIOLATIONS_RETURN_CODE + count += 1 + if options.count: + print(count) + return code + + +parse = Parser() + + +def check_for(kind, terminal=False): + def decorator(f): + f._check_for = kind + f._terminal = terminal + return f + return decorator + + +class PEP257Checker(object): + + """Checker for PEP 257. 
+ + D10x: Missing docstrings + D20x: Whitespace issues + D30x: Docstring formatting + D40x: Docstring content issues + + """ + + def check_source(self, source, filename): + module = parse(StringIO(source), filename) + for definition in module: + for check in self.checks: + terminate = False + if isinstance(definition, check._check_for): + error = check(None, definition, definition.docstring) + errors = error if hasattr(error, '__iter__') else [error] + for error in errors: + if error is not None: + partition = check.__doc__.partition('.\n') + message, _, explanation = partition + error.set_context(explanation=explanation, + definition=definition) + yield error + if check._terminal: + terminate = True + break + if terminate: + break + + @property + def checks(self): + all = [check for check in vars(type(self)).values() + if hasattr(check, '_check_for')] + return sorted(all, key=lambda check: not check._terminal) + + @check_for(Definition, terminal=True) + def check_docstring_missing(self, definition, docstring): + """D10{0,1,2,3}: Public definitions should have docstrings. + + All modules should normally have docstrings. [...] all functions and + classes exported by a module should also have docstrings. Public + methods (including the __init__ constructor) should also have + docstrings. + + Note: Public (exported) definitions are either those with names listed + in __all__ variable (if present), or those that do not start + with a single underscore. + + """ + if (not docstring and definition.is_public or + docstring and is_blank(eval(docstring))): + codes = {Module: D100, Class: D101, NestedClass: D101, + Method: D102, Function: D103, NestedFunction: D103, + Package: D104} + return codes[type(definition)]() + + @check_for(Definition) + def check_one_liners(self, definition, docstring): + """D200: One-liner docstrings should fit on one line with quotes. + + The closing quotes are on the same line as the opening quotes. + This looks better for one-liners. 
+ + """ + if docstring: + lines = eval(docstring).split('\n') + if len(lines) > 1: + non_empty_lines = sum(1 for l in lines if not is_blank(l)) + if non_empty_lines == 1: + return D200(len(lines)) + + @check_for(Function) + def check_no_blank_before(self, function, docstring): # def + """D20{1,2}: No blank lines allowed around function/method docstring. + + There's no blank line either before or after the docstring. + + """ + # NOTE: This does not take comments into account. + # NOTE: This does not take into account functions with groups of code. + if docstring: + before, _, after = function.source.partition(docstring) + blanks_before = list(map(is_blank, before.split('\n')[:-1])) + blanks_after = list(map(is_blank, after.split('\n')[1:])) + blanks_before_count = sum(takewhile(bool, reversed(blanks_before))) + blanks_after_count = sum(takewhile(bool, blanks_after)) + if blanks_before_count != 0: + yield D201(blanks_before_count) + if not all(blanks_after) and blanks_after_count != 0: + yield D202(blanks_after_count) + + @check_for(Class) + def check_blank_before_after_class(slef, class_, docstring): + """D20{3,4}: Class docstring should have 1 blank line around them. + + Insert a blank line before and after all docstrings (one-line or + multi-line) that document a class -- generally speaking, the class's + methods are separated from each other by a single blank line, and the + docstring needs to be offset from the first method by a blank line; + for symmetry, put a blank line between the class header and the + docstring. 
+ + """ + # NOTE: this gives false-positive in this case + # class Foo: + # + # """Docstring.""" + # + # + # # comment here + # def foo(): pass + if docstring: + before, _, after = class_.source.partition(docstring) + blanks_before = list(map(is_blank, before.split('\n')[:-1])) + blanks_after = list(map(is_blank, after.split('\n')[1:])) + blanks_before_count = sum(takewhile(bool, reversed(blanks_before))) + blanks_after_count = sum(takewhile(bool, blanks_after)) + if blanks_before_count != 1: + yield D203(blanks_before_count) + if not all(blanks_after) and blanks_after_count != 1: + yield D204(blanks_after_count) + + @check_for(Definition) + def check_blank_after_summary(self, definition, docstring): + """D205: Put one blank line between summary line and description. + + Multi-line docstrings consist of a summary line just like a one-line + docstring, followed by a blank line, followed by a more elaborate + description. The summary line may be used by automatic indexing tools; + it is important that it fits on one line and is separated from the + rest of the docstring by a blank line. + + """ + if docstring: + lines = eval(docstring).strip().split('\n') + if len(lines) > 1: + post_summary_blanks = list(map(is_blank, lines[1:])) + blanks_count = sum(takewhile(bool, post_summary_blanks)) + if blanks_count != 1: + return D205(blanks_count) + + @check_for(Definition) + def check_indent(self, definition, docstring): + """D20{6,7,8}: The entire docstring should be indented same as code. + + The entire docstring is indented the same as the quotes at its + first line. + + """ + if docstring: + before_docstring, _, _ = definition.source.partition(docstring) + _, _, indent = before_docstring.rpartition('\n') + lines = docstring.split('\n') + if len(lines) > 1: + lines = lines[1:] # First line does not need indent. 
+ indents = [leading_space(l) for l in lines if not is_blank(l)] + if set(' \t') == set(''.join(indents) + indent): + yield D206() + if (len(indents) > 1 and min(indents[:-1]) > indent or + indents[-1] > indent): + yield D208() + if min(indents) < indent: + yield D207() + + @check_for(Definition) + def check_newline_after_last_paragraph(self, definition, docstring): + """D209: Put multi-line docstring closing quotes on separate line. + + Unless the entire docstring fits on a line, place the closing + quotes on a line by themselves. + + """ + if docstring: + lines = [l for l in eval(docstring).split('\n') if not is_blank(l)] + if len(lines) > 1: + if docstring.split("\n")[-1].strip() not in ['"""', "'''"]: + return D209() + + @check_for(Definition) + def check_surrounding_whitespaces(self, definition, docstring): + """D210: No whitespaces allowed surrounding docstring text.""" + if docstring: + lines = eval(docstring).split('\n') + if lines[0].startswith(' ') or \ + len(lines) == 1 and lines[0].endswith(' '): + return D210() + + @check_for(Definition) + def check_triple_double_quotes(self, definition, docstring): + r'''D300: Use """triple double quotes""". + + For consistency, always use """triple double quotes""" around + docstrings. Use r"""raw triple double quotes""" if you use any + backslashes in your docstrings. For Unicode docstrings, use + u"""Unicode triple-quoted strings""". + + Note: Exception to this is made if the docstring contains + """ quotes in its body. + + ''' + if docstring and '"""' in eval(docstring) and docstring.startswith( + ("'''", "r'''", "u'''", "ur'''")): + # Allow ''' quotes if docstring contains """, because otherwise """ + # quotes could not be expressed inside docstring. Not in PEP 257. 
+ return + if docstring and not docstring.startswith( + ('"""', 'r"""', 'u"""', 'ur"""')): + quotes = "'''" if "'''" in docstring[:4] else "'" + return D300(quotes) + + @check_for(Definition) + def check_backslashes(self, definition, docstring): + r'''D301: Use r""" if any backslashes in a docstring. + + Use r"""raw triple double quotes""" if you use any backslashes + (\) in your docstrings. + + ''' + # Just check that docstring is raw, check_triple_double_quotes + # ensures the correct quotes. + if docstring and '\\' in docstring and not docstring.startswith( + ('r', 'ur')): + return D301() + + @check_for(Definition) + def check_unicode_docstring(self, definition, docstring): + r'''D302: Use u""" for docstrings with Unicode. + + For Unicode docstrings, use u"""Unicode triple-quoted strings""". + + ''' + # Just check that docstring is unicode, check_triple_double_quotes + # ensures the correct quotes. + if docstring and sys.version_info[0] <= 2: + if not is_ascii(docstring) and not docstring.startswith( + ('u', 'ur')): + return D302() + + @check_for(Definition) + def check_ends_with_period(self, definition, docstring): + """D400: First line should end with a period. + + The [first line of a] docstring is a phrase ending in a period. + + """ + if docstring: + summary_line = eval(docstring).strip().split('\n')[0] + if not summary_line.endswith('.'): + return D400(summary_line[-1]) + + @check_for(Function) + def check_imperative_mood(self, function, docstring): # def context + """D401: First line should be in imperative mood: 'Do', not 'Does'. + + [Docstring] prescribes the function or method's effect as a command: + ("Do this", "Return that"), not as a description; e.g. don't write + "Returns the pathname ...". 
+ + """ + if docstring: + stripped = eval(docstring).strip() + if stripped: + first_word = stripped.split()[0] + if first_word.endswith('s') and not first_word.endswith('ss'): + return D401(first_word[:-1], first_word) + + @check_for(Function) + def check_no_signature(self, function, docstring): # def context + """D402: First line should not be function's or method's "signature". + + The one-line docstring should NOT be a "signature" reiterating the + function/method parameters (which can be obtained by introspection). + + """ + if docstring: + first_line = eval(docstring).strip().split('\n')[0] + if function.name + '(' in first_line.replace(' ', ''): + return D402() + + # Somewhat hard to determine if return value is mentioned. + # @check(Function) + def SKIP_check_return_type(self, function, docstring): + """D40x: Return value type should be mentioned. + + [T]he nature of the return value cannot be determined by + introspection, so it should be mentioned. + + """ + if docstring and function.returns_value: + if 'return' not in docstring.lower(): + return Error() + + +def main(): + try: + sys.exit(run_pep257()) + except KeyboardInterrupt: + pass + + +if __name__ == '__main__': + main() diff --git a/pymode/libs/pylama/lint/pylama_pep8/pep8.py b/pymode/libs/pep8.py similarity index 89% rename from pymode/libs/pylama/lint/pylama_pep8/pep8.py rename to pymode/libs/pep8.py index 10a3a155..34ce07ae 100644 --- a/pymode/libs/pylama/lint/pylama_pep8/pep8.py +++ b/pymode/libs/pep8.py @@ -2,6 +2,7 @@ # pep8.py - Check Python source code formatting, according to PEP 8 # Copyright (C) 2006-2009 Johann C. 
Rocholl # Copyright (C) 2009-2014 Florent Xicluna +# Copyright (C) 2014-2015 Ian Lee # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation files @@ -46,8 +47,6 @@ """ from __future__ import with_statement -__version__ = '1.6.0a0' - import os import sys import re @@ -63,13 +62,21 @@ except ImportError: from ConfigParser import RawConfigParser -DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__' -DEFAULT_IGNORE = 'E123,E226,E24,E704' -if sys.platform == 'win32': - DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8') -else: - DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or - os.path.expanduser('~/.config'), 'pep8') +__version__ = '1.6.3a0' + +DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__,.tox' +DEFAULT_IGNORE = 'E121,E123,E126,E226,E24,E704' +try: + if sys.platform == 'win32': + USER_CONFIG = os.path.expanduser(r'~\.pep8') + else: + USER_CONFIG = os.path.join( + os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'), + 'pep8' + ) +except ImportError: + USER_CONFIG = None + PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8') TESTSUITE_PATH = os.path.join(os.path.dirname(__file__), 'testsuite') MAX_LINE_LENGTH = 79 @@ -101,8 +108,9 @@ DOCSTRING_REGEX = re.compile(r'u?r?["\']') EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]') WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)') -COMPARE_SINGLETON_REGEX = re.compile(r'([=!]=)\s*(None|False|True)') -COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^[({ ]+\s+(in|is)\s') +COMPARE_SINGLETON_REGEX = re.compile(r'\b(None|False|True)?\s*([=!]=)' + r'\s*(?(1)|(None|False|True))\b') +COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^][)(}{ ]+\s+(in|is)\s') COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type' r'|\s*\(\s*([^)]*[^ )])\s*\))') KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS)) @@ -199,7 +207,6 @@ def maximum_line_length(physical_line, 
max_line_length, multiline): Reports error E501. """ - max_line_length = int(max_line_length) line = physical_line.rstrip() length = len(line) if length > max_line_length and not noqa(line): @@ -428,6 +435,7 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, indent_chances = {} last_indent = tokens[0][2] visual_indent = None + last_token_multiline = False # for each depth, memorize the visual indent column indent = [last_indent[1]] if verbose >= 3: @@ -507,8 +515,9 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing, yield start, "%s continuation line %s" % error # look for visual indenting - if (parens[row] and token_type not in (tokenize.NL, tokenize.COMMENT) - and not indent[depth]): + if (parens[row] and + token_type not in (tokenize.NL, tokenize.COMMENT) and + not indent[depth]): indent[depth] = start[1] indent_chances[start[1]] = True if verbose >= 4: @@ -681,7 +690,7 @@ def missing_whitespace_around_operator(logical_line, tokens): if need_space is True or need_space[1]: # A needed trailing space was not found yield prev_end, "E225 missing whitespace around operator" - else: + elif prev_text != '**': code, optype = 'E226', 'arithmetic' if prev_text == '%': code, optype = 'E228', 'modulo' @@ -749,6 +758,7 @@ def whitespace_around_named_parameter_equals(logical_line, tokens): Okay: boolean(a != b) Okay: boolean(a <= b) Okay: boolean(a >= b) + Okay: def foo(arg: int = 42): E251: def complex(real, imag = 0.0): E251: return magic(r = real, i = imag) @@ -756,6 +766,8 @@ def whitespace_around_named_parameter_equals(logical_line, tokens): parens = 0 no_space = False prev_end = None + annotated_func_arg = False + in_def = logical_line.startswith('def') message = "E251 unexpected spaces around keyword / parameter equals" for token_type, text, start, end, line in tokens: if token_type == tokenize.NL: @@ -764,15 +776,22 @@ def whitespace_around_named_parameter_equals(logical_line, tokens): no_space = False if start != 
prev_end: yield (prev_end, message) - elif token_type == tokenize.OP: + if token_type == tokenize.OP: if text == '(': parens += 1 elif text == ')': parens -= 1 - elif parens and text == '=': + elif in_def and text == ':' and parens == 1: + annotated_func_arg = True + elif parens and text == ',' and parens == 1: + annotated_func_arg = False + elif parens and text == '=' and not annotated_func_arg: no_space = True if start != prev_end: yield (prev_end, message) + if not parens: + annotated_func_arg = False + prev_end = end @@ -836,6 +855,56 @@ def imports_on_separate_lines(logical_line): yield found, "E401 multiple imports on one line" +def module_imports_on_top_of_file( + logical_line, indent_level, checker_state, noqa): + r"""Imports are always put at the top of the file, just after any module + comments and docstrings, and before module globals and constants. + + Okay: import os + Okay: # this is a comment\nimport os + Okay: '''this is a module docstring'''\nimport os + Okay: r'''this is a module docstring'''\nimport os + Okay: try:\n import x\nexcept:\n pass\nelse:\n pass\nimport y + Okay: try:\n import x\nexcept:\n pass\nfinally:\n pass\nimport y + E402: a=1\nimport os + E402: 'One string'\n"Two string"\nimport os + E402: a=1\nfrom sys import x + + Okay: if x:\n import os + """ + def is_string_literal(line): + if line[0] in 'uUbB': + line = line[1:] + if line and line[0] in 'rR': + line = line[1:] + return line and (line[0] == '"' or line[0] == "'") + + allowed_try_keywords = ('try', 'except', 'else', 'finally') + + if indent_level: # Allow imports in conditional statements or functions + return + if not logical_line: # Allow empty lines or comments + return + if noqa: + return + line = logical_line + if line.startswith('import ') or line.startswith('from '): + if checker_state.get('seen_non_imports', False): + yield 0, "E402 module level import not at top of file" + elif any(line.startswith(kw) for kw in allowed_try_keywords): + # Allow try, except, else, 
finally keywords intermixed with imports in + # order to support conditional importing + return + elif is_string_literal(line): + # The first literal is a docstring, allow it. Otherwise, report error. + if checker_state.get('seen_docstring', False): + checker_state['seen_non_imports'] = True + else: + checker_state['seen_docstring'] = True + else: + checker_state['seen_non_imports'] = True + + def compound_statements(logical_line): r"""Compound statements (on the same line) are generally discouraged. @@ -872,8 +941,12 @@ def compound_statements(logical_line): if ((before.count('{') <= before.count('}') and # {'a': 1} (dict) before.count('[') <= before.count(']') and # [1:2] (slice) before.count('(') <= before.count(')'))): # (annotation) - if LAMBDA_REGEX.search(before): - yield 0, "E731 do not assign a lambda expression, use a def" + lambda_kw = LAMBDA_REGEX.search(before) + if lambda_kw: + before = line[:lambda_kw.start()].rstrip() + if before[-1:] == '=' and isidentifier(before[:-1].strip()): + yield 0, ("E731 do not assign a lambda expression, use a " + "def") break if before.startswith('def '): yield 0, "E704 multiple statements on one line (def)" @@ -903,10 +976,15 @@ def explicit_line_join(logical_line, tokens): Okay: aaa = [123,\n 123] Okay: aaa = ("bbb "\n "ccc") Okay: aaa = "bbb " \\n "ccc" + Okay: aaa = 123 # \\ """ prev_start = prev_end = parens = 0 + comment = False + backslash = None for token_type, text, start, end, line in tokens: - if start[0] != prev_start and parens and backslash: + if token_type == tokenize.COMMENT: + comment = True + if start[0] != prev_start and parens and backslash and not comment: yield backslash, "E502 the backslash is redundant between brackets" if end[0] != prev_end: if line.rstrip('\r\n').endswith('\\'): @@ -923,6 +1001,45 @@ def explicit_line_join(logical_line, tokens): parens -= 1 +def break_around_binary_operator(logical_line, tokens): + r""" + Avoid breaks before binary operators. 
+ + The preferred place to break around a binary operator is after the + operator, not before it. + + W503: (width == 0\n + height == 0) + W503: (width == 0\n and height == 0) + + Okay: (width == 0 +\n height == 0) + Okay: foo(\n -x) + Okay: foo(x\n []) + Okay: x = '''\n''' + '' + Okay: foo(x,\n -y) + Okay: foo(x, # comment\n -y) + """ + def is_binary_operator(token_type, text): + # The % character is strictly speaking a binary operator, but the + # common usage seems to be to put it next to the format parameters, + # after a line break. + return ((token_type == tokenize.OP or text in ['and', 'or']) and + text not in "()[]{},:.;@=%") + + line_break = False + unary_context = True + for token_type, text, start, end, line in tokens: + if token_type == tokenize.COMMENT: + continue + if ('\n' in text or '\r' in text) and token_type != tokenize.STRING: + line_break = True + else: + if (is_binary_operator(token_type, text) and line_break and + not unary_context): + yield start, "W503 line break before binary operator" + unary_context = text in '([{,;' + line_break = False + + def comparison_to_singleton(logical_line, noqa): r"""Comparison to singletons should use "is" or "is not". @@ -931,7 +1048,9 @@ def comparison_to_singleton(logical_line, noqa): Okay: if arg is not None: E711: if arg != None: + E711: if None == arg: E712: if arg == True: + E712: if False == arg: Also, beware of writing if x when you really mean if x is not None -- e.g. 
when testing whether a variable or argument that defaults to None was @@ -940,8 +1059,9 @@ def comparison_to_singleton(logical_line, noqa): """ match = not noqa and COMPARE_SINGLETON_REGEX.search(logical_line) if match: - same = (match.group(1) == '==') - singleton = match.group(2) + singleton = match.group(1) or match.group(3) + same = (match.group(2) == '==') + msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton) if singleton in ('None',): code = 'E711' @@ -950,7 +1070,7 @@ def comparison_to_singleton(logical_line, noqa): nonzero = ((singleton == 'True' and same) or (singleton == 'False' and not same)) msg += " or 'if %scond:'" % ('' if nonzero else 'not ') - yield match.start(1), ("%s comparison to %s should be %s" % + yield match.start(2), ("%s comparison to %s should be %s" % (code, singleton, msg)) @@ -975,7 +1095,7 @@ def comparison_negative(logical_line): yield pos, "E714 test for object identity should be 'is not'" -def comparison_type(logical_line): +def comparison_type(logical_line, noqa): r"""Object type comparisons should always use isinstance(). Do not compare types directly. @@ -991,7 +1111,7 @@ def comparison_type(logical_line): Okay: if type(a1) is type(b1): """ match = COMPARE_TYPE_REGEX.search(logical_line) - if match: + if match and not noqa: inst = match.group(1) if inst and isidentifier(inst) and inst not in SINGLETONS: return # Allow comparison for types which are not obvious @@ -1057,7 +1177,7 @@ def readlines(filename): """Read the source code.""" with open(filename, 'rU') as f: return f.readlines() - isidentifier = re.compile(r'[a-zA-Z_]\w*').match + isidentifier = re.compile(r'[a-zA-Z_]\w*$').match stdin_get_value = sys.stdin.read else: # Python 3 @@ -1156,10 +1276,13 @@ def normalize_paths(value, parent=os.curdir): Return a list of absolute paths. 
""" - if not value or isinstance(value, list): + if not value: + return [] + if isinstance(value, list): return value paths = [] for path in value.split(','): + path = path.strip() if '/' in path: path = os.path.abspath(os.path.join(parent, path)) paths.append(path.rstrip('/')) @@ -1176,14 +1299,12 @@ def filename_match(filename, patterns, default=True): return any(fnmatch(filename, pattern) for pattern in patterns) +def _is_eol_token(token): + return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n' if COMMENT_WITH_NL: - def _is_eol_token(token): - return (token[0] in NEWLINE or - (token[0] == tokenize.COMMENT and token[1] == token[4])) -else: - def _is_eol_token(token): - return token[0] in NEWLINE - + def _is_eol_token(token, _eol_token=_is_eol_token): + return _eol_token(token) or (token[0] == tokenize.COMMENT and + token[1] == token[4]) ############################################################################## # Framework to run all checks @@ -1193,6 +1314,13 @@ def _is_eol_token(token): _checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}} +def _get_parameters(function): + if sys.version_info >= (3, 3): + return list(inspect.signature(function).parameters) + else: + return inspect.getargspec(function)[0] + + def register_check(check, codes=None): """Register a new check object.""" def _add_check(check, kind, codes, args): @@ -1201,13 +1329,13 @@ def _add_check(check, kind, codes, args): else: _checks[kind][check] = (codes or [''], args) if inspect.isfunction(check): - args = inspect.getargspec(check)[0] + args = _get_parameters(check) if args and args[0] in ('physical_line', 'logical_line'): if codes is None: codes = ERRORCODE_REGEX.findall(check.__doc__ or '') _add_check(check, args[0], codes, args) elif inspect.isclass(check): - if inspect.getargspec(check.__init__)[0][:2] == ['self', 'tree']: + if _get_parameters(check.__init__)[:2] == ['self', 'tree']: _add_check(check, 'tree', codes, None) @@ -1240,6 +1368,8 @@ def 
__init__(self, filename=None, lines=None, self.hang_closing = options.hang_closing self.verbose = options.verbose self.filename = filename + # Dictionary where a checker can store its custom state. + self._checker_states = {} if filename is None: self.filename = 'stdin' self.lines = lines or [] @@ -1295,10 +1425,16 @@ def run_check(self, check, argument_names): arguments.append(getattr(self, name)) return check(*arguments) + def init_checker_state(self, name, argument_names): + """ Prepares a custom state for the specific checker plugin.""" + if 'checker_state' in argument_names: + self.checker_state = self._checker_states.setdefault(name, {}) + def check_physical(self, line): """Run all physical checks on a raw input line.""" self.physical_line = line for name, check, argument_names in self._physical_checks: + self.init_checker_state(name, argument_names) result = self.run_check(check, argument_names) if result is not None: (offset, text) = result @@ -1326,8 +1462,8 @@ def build_tokens_line(self): (start_row, start_col) = start if prev_row != start_row: # different row prev_text = self.lines[prev_row - 1][prev_col - 1] - if prev_text == ',' or (prev_text not in '{[(' - and text not in '}])'): + if prev_text == ',' or (prev_text not in '{[(' and + text not in '}])'): text = ' ' + text elif prev_col != start_col: # different column text = line[prev_col:start_col] + text @@ -1343,6 +1479,10 @@ def check_logical(self): """Build a line from tokens and run all logical checks on it.""" self.report.increment_logical_line() mapping = self.build_tokens_line() + + if not mapping: + return + (start_row, start_col) = mapping[0][1] start_line = self.lines[start_row - 1] self.indent_level = expand_indent(start_line[:start_col]) @@ -1353,6 +1493,7 @@ def check_logical(self): for name, check, argument_names in self._logical_checks: if self.verbose >= 4: print(' ' + name) + self.init_checker_state(name, argument_names) for offset, text in self.run_check(check, argument_names) or 
(): if not isinstance(offset, tuple): for token_offset, pos in mapping: @@ -1370,7 +1511,7 @@ def check_ast(self): """Build the file's AST and run all AST checks.""" try: tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST) - except (SyntaxError, TypeError): + except (ValueError, SyntaxError, TypeError): return self.report_invalid_syntax() for name, cls, __ in self._ast_checks: checker = cls(tree, self.filename) @@ -1614,6 +1755,14 @@ def get_file_results(self): print(re.sub(r'\S', ' ', line[:offset]) + '^') if self._show_pep8 and doc: print(' ' + doc.strip()) + + # stdout is block buffered when not stdout.isatty(). + # line can be broken where buffer boundary since other processes + # write to same file. + # flush() after print() to avoid buffer boundary. + # Typical buffer size is 8192. line written safely when + # len(line) < 8192. + sys.stdout.flush() return self.file_errors @@ -1637,7 +1786,7 @@ def __init__(self, *args, **kwargs): # build options from the command line self.checker_class = kwargs.pop('checker_class', Checker) parse_argv = kwargs.pop('parse_argv', False) - config_file = kwargs.pop('config_file', None) + config_file = kwargs.pop('config_file', False) parser = kwargs.pop('parser', None) # build options from dict options_dict = dict(*args, **kwargs) @@ -1790,7 +1939,8 @@ def get_parser(prog='pep8', version=__version__): parser.add_option('--select', metavar='errors', default='', help="select errors and warnings (e.g. E,W6)") parser.add_option('--ignore', metavar='errors', default='', - help="skip errors and warnings (e.g. E4,W)") + help="skip errors and warnings (e.g. 
E4,W) " + "(default: %s)" % DEFAULT_IGNORE) parser.add_option('--show-source', action='store_true', help="show source code for each error") parser.add_option('--show-pep8', action='store_true', @@ -1826,25 +1976,40 @@ def get_parser(prog='pep8', version=__version__): def read_config(options, args, arglist, parser): - """Read both user configuration and local configuration.""" + """Read and parse configurations + + If a config file is specified on the command line with the "--config" + option, then only it is used for configuration. + + Otherwise, the user configuration (~/.config/pep8) and any local + configurations in the current directory or above will be merged together + (in that order) using the read method of ConfigParser. + """ config = RawConfigParser() - user_conf = options.config - if user_conf and os.path.isfile(user_conf): - if options.verbose: - print('user configuration: %s' % user_conf) - config.read(user_conf) + cli_conf = options.config local_dir = os.curdir + + if USER_CONFIG and os.path.isfile(USER_CONFIG): + if options.verbose: + print('user configuration: %s' % USER_CONFIG) + config.read(USER_CONFIG) + parent = tail = args and os.path.abspath(os.path.commonprefix(args)) while tail: - if config.read([os.path.join(parent, fn) for fn in PROJECT_CONFIG]): + if config.read(os.path.join(parent, fn) for fn in PROJECT_CONFIG): local_dir = parent if options.verbose: print('local configuration: in %s' % parent) break (parent, tail) = os.path.split(parent) + if cli_conf and os.path.isfile(cli_conf): + if options.verbose: + print('cli configuration: %s' % cli_conf) + config.read(cli_conf) + pep8_section = parser.prog if config.has_section(pep8_section): option_list = dict([(o.dest, o.type or o.action) @@ -1881,19 +2046,21 @@ def read_config(options, args, arglist, parser): def process_options(arglist=None, parse_argv=False, config_file=None, parser=None): - """Process options passed either via arglist or via command line args.""" + """Process options 
passed either via arglist or via command line args. + + Passing in the ``config_file`` parameter allows other tools, such as flake8 + to specify their own options to be processed in pep8. + """ if not parser: parser = get_parser() if not parser.has_option('--config'): - if config_file is True: - config_file = DEFAULT_CONFIG group = parser.add_option_group("Configuration", description=( "The project options are read from the [%s] section of the " "tox.ini file or the setup.cfg file located in any parent folder " "of the path(s) being processed. Allowed options are: %s." % (parser.prog, ', '.join(parser.config_options)))) group.add_option('--config', metavar='path', default=config_file, - help="user config file location (default: %default)") + help="user config file location") # Don't read the command line if the module is used as a library. if not arglist and not parse_argv: arglist = [] @@ -1938,7 +2105,7 @@ def _main(): except AttributeError: pass # not supported on Windows - pep8style = StyleGuide(parse_argv=True, config_file=True) + pep8style = StyleGuide(parse_argv=True) options = pep8style.options if options.doctest or options.testsuite: from testsuite.support import run_tests diff --git a/pymode/libs/pkg_resources/__init__.py b/pymode/libs/pkg_resources/__init__.py new file mode 100644 index 00000000..42ddcf7c --- /dev/null +++ b/pymode/libs/pkg_resources/__init__.py @@ -0,0 +1,3113 @@ +""" +Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. 
It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. +""" + +from __future__ import absolute_import + +import sys +import os +import io +import time +import re +import types +import zipfile +import zipimport +import warnings +import stat +import functools +import pkgutil +import token +import symbol +import operator +import platform +import collections +import plistlib +import email.parser +import tempfile +import textwrap +from pkgutil import get_importer + +try: + import _imp +except ImportError: + # Python 3.2 compatibility + import imp as _imp + +PY3 = sys.version_info > (3,) +PY2 = not PY3 + +if PY3: + from urllib.parse import urlparse, urlunparse + +if PY2: + from urlparse import urlparse, urlunparse + +if PY3: + string_types = str, +else: + string_types = str, eval('unicode') + +iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems() + +# capture these to bypass sandboxing +from os import utime +try: + from os import mkdir, rename, unlink + WRITE_SUPPORT = True +except ImportError: + # no write support, probably under GAE + WRITE_SUPPORT = False + +from os import open as os_open +from os.path import isdir, split + +# Avoid try/except due to potential problems with delayed import mechanisms. +if sys.version_info >= (3, 3) and sys.implementation.name == "cpython": + import importlib.machinery as importlib_machinery +else: + importlib_machinery = None + +try: + import parser +except ImportError: + pass + +try: + import pkg_resources._vendor.packaging.version + import pkg_resources._vendor.packaging.specifiers + packaging = pkg_resources._vendor.packaging +except ImportError: + # fallback to naturally-installed version; allows system packagers to + # omit vendored packages. + import packaging.version + import packaging.specifiers + + +# declare some globals that will be defined later to +# satisfy the linters. 
+require = None +working_set = None + + +class PEP440Warning(RuntimeWarning): + """ + Used when there is an issue with a version or specifier not complying with + PEP 440. + """ + + +class _SetuptoolsVersionMixin(object): + + def __hash__(self): + return super(_SetuptoolsVersionMixin, self).__hash__() + + def __lt__(self, other): + if isinstance(other, tuple): + return tuple(self) < other + else: + return super(_SetuptoolsVersionMixin, self).__lt__(other) + + def __le__(self, other): + if isinstance(other, tuple): + return tuple(self) <= other + else: + return super(_SetuptoolsVersionMixin, self).__le__(other) + + def __eq__(self, other): + if isinstance(other, tuple): + return tuple(self) == other + else: + return super(_SetuptoolsVersionMixin, self).__eq__(other) + + def __ge__(self, other): + if isinstance(other, tuple): + return tuple(self) >= other + else: + return super(_SetuptoolsVersionMixin, self).__ge__(other) + + def __gt__(self, other): + if isinstance(other, tuple): + return tuple(self) > other + else: + return super(_SetuptoolsVersionMixin, self).__gt__(other) + + def __ne__(self, other): + if isinstance(other, tuple): + return tuple(self) != other + else: + return super(_SetuptoolsVersionMixin, self).__ne__(other) + + def __getitem__(self, key): + return tuple(self)[key] + + def __iter__(self): + component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) + replace = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + }.get + + def _parse_version_parts(s): + for part in component_re.split(s): + part = replace(part, part) + if not part or part == '.': + continue + if part[:1] in '0123456789': + # pad for numeric comparison + yield part.zfill(8) + else: + yield '*'+part + + # ensure that alpha/beta/candidate are before final + yield '*final' + + def old_parse_version(s): + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + # remove '-' before a prerelease tag + if part < '*final': + 
while parts and parts[-1] == '*final-': + parts.pop() + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == '00000000': + parts.pop() + parts.append(part) + return tuple(parts) + + # Warn for use of this function + warnings.warn( + "You have iterated over the result of " + "pkg_resources.parse_version. This is a legacy behavior which is " + "inconsistent with the new version class introduced in setuptools " + "8.0. In most cases, conversion to a tuple is unnecessary. For " + "comparison of versions, sort the Version instances directly. If " + "you have another use case requiring the tuple, please file a " + "bug with the setuptools project describing that need.", + RuntimeWarning, + stacklevel=1, + ) + + for part in old_parse_version(str(self)): + yield part + + +class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version): + pass + + +class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, + packaging.version.LegacyVersion): + pass + + +def parse_version(v): + try: + return SetuptoolsVersion(v) + except packaging.version.InvalidVersion: + return SetuptoolsLegacyVersion(v) + + +_state_vars = {} + +def _declare_state(vartype, **kw): + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.items(): + state[k] = g['_sget_'+v](g[k]) + return state + +def __setstate__(state): + g = globals() + for k, v in state.items(): + g['_sset_'+_state_vars[k]](k, g[k], v) + return state + +def _sget_dict(val): + return val.copy() + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + +def _sget_object(val): + return val.__getstate__() + +def _sset_object(key, ob, state): + ob.__setstate__(state) + +_sget_none = _sset_none = lambda *args: None + + +def get_supported_platform(): + """Return this platform's maximum compatible version. 
+ + distutils.util.get_platform() normally reports the minimum version + of Mac OS X that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of Mac OS X that we are *running*. To allow usage of packages that + explicitly require a newer version of Mac OS X, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. + """ + plat = get_build_platform() + m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + except ValueError: + # not Mac OS X + pass + return plat + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Warnings + 'PEP440Warning', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 
'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] + +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + def __repr__(self): + return self.__class__.__name__+repr(self.args) + + +class VersionConflict(ResolutionError): + """ + An already-installed version conflicts with the requested version. + + Should be initialized with the installed Distribution and the requested + Requirement. + """ + + _template = "{self.dist} is installed but {self.req} is required" + + @property + def dist(self): + return self.args[0] + + @property + def req(self): + return self.args[1] + + def report(self): + return self._template.format(**locals()) + + def with_context(self, required_by): + """ + If required_by is non-empty, return a version of self that is a + ContextualVersionConflict. + """ + if not required_by: + return self + args = self.args + (required_by,) + return ContextualVersionConflict(*args) + + +class ContextualVersionConflict(VersionConflict): + """ + A VersionConflict that accepts a third parameter, the set of the + requirements that required the installed Distribution. 
+ """ + + _template = VersionConflict._template + ' by {self.required_by}' + + @property + def required_by(self): + return self.args[2] + + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + + _template = ("The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}") + + @property + def req(self): + return self.args[0] + + @property + def requirers(self): + return self.args[1] + + @property + def requirers_str(self): + if not self.requirers: + return 'the application' + return ', '.join(self.requirers) + + def report(self): + return self._template.format(**locals()) + + def __str__(self): + return self.report() + + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" +_provider_factories = {} + +PY_MAJOR = sys.version[:3] +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. 
+ """ + _provider_factories[loader_type] = provider_factory + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq, Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + +def _macosx_vers(_cache=[]): + if not _cache: + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + if hasattr(plistlib, 'readPlist'): + plist_content = plistlib.readPlist(plist) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + + _cache.append(version.split('.')) + return _cache[0] + +def _macosx_arch(machine): + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and Mac OS X. 
+ """ + try: + # Python 2.7 or >=3.2 + from sysconfig import get_platform + except ImportError: + from distutils.util import get_platform + + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macosx_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), + _macosx_arch(machine)) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +# XXX backward compat +get_platform = get_build_platform + + +def compatible_platforms(provided, required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. + + XXX Needs compatibility checks for Linux and other unixy OSes. + """ + if provided is None or required is None or provided==required: + # easy case + return True + + # Mac OS X special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macosx designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + return True + # egg isn't macosx or legacy darwin + return False + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + # is the required OS major update >= the provided one? 
+ if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + +# backward compatibility +run_main = run_script + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist, string_types): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? (like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + +class WorkingSet(object): + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + @classmethod + def _build_master(cls): + """ + Prepare the master working set. 
+ """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry, True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + def __contains__(self, dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. 
+ If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + # XXX add more info + raise VersionConflict(dist, req) + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + for dist in self: + entries = dist.get_entry_map(group) + if name is None: + for ep in entries.values(): + yield ep + elif name in entries: + yield entries[name] + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. + """ + seen = {} + for item in self.entries: + if item not in self.entry_keys: + # workaround a cache issue + continue + + for key in self.entry_keys[item]: + if key not in seen: + seen[key]=1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True, replace=False): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set, unless `replace=True`. + If it's added, any callbacks registered with the ``subscribe()`` method + will be called. 
+ """ + if insert: + dist.insert_on(self.entries, entry) + + if entry is None: + entry = dist.location + keys = self.entry_keys.setdefault(entry,[]) + keys2 = self.entry_keys.setdefault(dist.location,[]) + if not replace and dist.key in self.by_key: + # ignore hidden distros + return + + self.by_key[dist.key] = dist + if dist.key not in keys: + keys.append(dist.key) + if dist.key not in keys2: + keys2.append(dist.key) + self._added_new(dist) + + def resolve(self, requirements, env=None, installer=None, + replace_conflicting=False): + """List all distributions needed to (recursively) meet `requirements` + + `requirements` must be a sequence of ``Requirement`` objects. `env`, + if supplied, should be an ``Environment`` instance. If + not supplied, it defaults to all distributions available within any + entry or distribution in the working set. `installer`, if supplied, + will be invoked with each requirement that cannot be met by an + already-installed distribution; it should return a ``Distribution`` or + ``None``. + + Unless `replace_conflicting=True`, raises a VersionConflict exception if + any requirements are found on the path that have the correct name but + the wrong version. Otherwise, if an `installer` is supplied it will be + invoked to obtain the correct version of the requirement and activate + it. + """ + + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} + to_activate = [] + + # Mapping of requirement to set of distributions that required it; + # useful for reporting info about conflicts. 
+ required_by = collections.defaultdict(set) + + while requirements: + # process dependencies breadth-first + req = requirements.pop(0) + if req in processed: + # Ignore cyclic or redundant dependencies + continue + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match(req, ws, installer) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + + # push the new requirements onto the stack + new_requirements = dist.requires(req.extras)[::-1] + requirements.extend(new_requirements) + + # Register the new requirements needed by req + for new_requirement in new_requirements: + required_by[new_requirement].add(req.project_name) + + processed[req] = True + + # return list of distros to activate + return to_activate + + def find_plugins(self, plugin_env, full_env=None, installer=None, + fallback=True): + """Find all activatable distributions in `plugin_env` + + Example usage:: + + distributions, errors = working_set.find_plugins( + Environment(plugin_dirlist) + ) + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) + + The `plugin_env` should be an ``Environment`` instance that contains + only distributions that are in the project's "plugin directory" or + directories. 
The `full_env`, if supplied, should be an ``Environment`` + contains all currently-available distributions. If `full_env` is not + supplied, one is created automatically from the ``WorkingSet`` this + method is called on, which will typically mean that every directory on + ``sys.path`` will be scanned for distributions. + + `installer` is a standard installer callback as used by the + ``resolve()`` method. The `fallback` flag indicates whether we should + attempt to resolve older versions of a plugin if the newest version + cannot be resolved. + + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. `error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. 
+ """ + + plugin_projects = list(plugin_env) + # scan project names in alphabetic order + plugin_projects.sort() + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + # put all our entries in shadow_set + list(map(shadow_set.add, self)) + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError as v: + # save error info + error_info[dist] = v + if fallback: + # try the next older version of project + continue + else: + # give up on this project, keep going + break + + else: + list(map(shadow_set.add, resolvees)) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. 
+ """ + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + def subscribe(self, callback): + """Invoke `callback` for all distributions (including existing ones)""" + if callback in self.callbacks: + return + self.callbacks.append(callback) + for dist in self: + callback(dist) + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return ( + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:] + ) + + def __setstate__(self, e_k_b_c): + entries, keys, by_key, callbacks = e_k_b_c + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + +class Environment(object): + """Searchable snapshot of distributions on a search path""" + + def __init__(self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. + + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'3.3'``); + it defaults to the current version. + + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? 
+ + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + return (self.python is None or dist.py_version is None + or dist.py_version==self.python) \ + and compatible_platforms(dist.platform, self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self, project_name): + """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. + + """ + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) + + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key, []) + if dist not in dists: + dists.append(dist) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) + + def best_match(self, req, working_set, installer=None): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) 
If a suitable distribution + isn't active, this method returns the newest distribution in the + environment that meets the ``Requirement`` in `req`. If no suitable + distribution is found, and `installer` is supplied, then the result of + calling the environment's ``obtain(req, installer)`` method will be + returned. + """ + dist = working_set.find(req) + if dist is not None: + return dist + for dist in self[req.key]: + if dist in req: + return dist + # try to download/install + return self.obtain(req, installer) + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. via download). In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in self._distmap.keys(): + if self[key]: + yield key + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other, Distribution): + self.add(other) + elif isinstance(other, Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +# XXX backward compatibility +AvailableDistributions = Environment + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this 
exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + err = ExtractionError("""Can't extract file(s) to egg cache + +The following error occurred while trying to extract file(s) to the Python egg +cache: + + %s + +The Python egg cache 
directory is currently set to: + + %s + +Perhaps your account does not have write access to this directory? You can +change the cache directory by setting the PYTHON_EGG_CACHE environment +variable to point to an accessible directory. +""" % (old_exc, cache_path) + ) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except: + self.extraction_error() + + self._warn_unsafe_extraction_path(extract_path) + + self.cached_files[target_path] = 1 + return target_path + + @staticmethod + def _warn_unsafe_extraction_path(path): + """ + If the default extraction path is overridden and set to an insecure + location, such as /tmp, it opens up an opportunity for an attacker to + replace an extracted file with an unauthorized payload. Warn the user + if a known insecure location is used. + + See Distribute #375 for more details. + """ + if os.name == 'nt' and not path.startswith(os.environ['windir']): + # On Windows, permissions are generally restrictive by default + # and temp directories are not writable by other users, so + # bypass the warning. 
            return
        # Warn when the extraction directory is group- or other-writable
        # (e.g. a shared /tmp) -- see the docstring above for the rationale.
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            # 0o555 adds read+execute for user/group/other; the 0o7777 mask
            # keeps only the permission bits.
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.
+ + (Note: you may not change the extraction path for a given resource + manager once resources have been extracted, unless you first call + ``cleanup_resources()``.) + """ + if self.cached_files: + raise ValueError( + "Can't change extraction path, files already extracted" + ) + + self.extraction_path = path + + def cleanup_resources(self, force=False): + """ + Delete all extracted resource files and directories, returning a list + of the file and directory names that could not be successfully removed. + This function does not have any concurrency protection, so it should + generally only be called when the extraction path is a temporary + directory exclusive to a single process. This method is not + automatically called; you must call it explicitly or register it as an + ``atexit`` function if you wish to ensure cleanup of a temporary + directory used for extractions. + """ + # XXX + +def get_default_cache(): + """Determine the default cache location + + This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. + Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the + "Application Data" directory. On all other systems, it's "~/.python-eggs". + """ + try: + return os.environ['PYTHON_EGG_CACHE'] + except KeyError: + pass + + if os.name!='nt': + return os.path.expanduser('~/.python-eggs') + + # XXX this may be locale-specific! 
+ app_data = 'Application Data' + app_homes = [ + # best option, should be locale-safe + (('APPDATA',), None), + (('USERPROFILE',), app_data), + (('HOMEDRIVE','HOMEPATH'), app_data), + (('HOMEPATH',), app_data), + (('HOME',), None), + # 95/98/ME + (('WINDIR',), app_data), + ] + + for keys, subdir in app_homes: + dirname = '' + for key in keys: + if key in os.environ: + dirname = os.path.join(dirname, os.environ[key]) + else: + break + else: + if subdir: + dirname = os.path.join(dirname, subdir) + return os.path.join(dirname, 'Python-Eggs') + else: + raise RuntimeError( + "Please set the PYTHON_EGG_CACHE enviroment variable" + ) + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): + """ + Convert an arbitrary string to a standard version string + """ + try: + # normalize the version + return str(packaging.version.Version(version)) + except packaging.version.InvalidVersion: + version = version.replace(' ','.') + return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def safe_extra(extra): + """Convert an arbitrary string to a standard 'extra' name + + Any runs of non-alphanumeric characters are replaced with a single '_', + and the result is always lowercased. + """ + return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() + + +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. 
+ """ + return name.replace('-','_') + + +class MarkerEvaluation(object): + values = { + 'os_name': lambda: os.name, + 'sys_platform': lambda: sys.platform, + 'python_full_version': platform.python_version, + 'python_version': lambda: platform.python_version()[:3], + 'platform_version': platform.version, + 'platform_machine': platform.machine, + 'python_implementation': platform.python_implementation, + } + + @classmethod + def is_invalid_marker(cls, text): + """ + Validate text as a PEP 426 environment marker; return an exception + if invalid or False otherwise. + """ + try: + cls.evaluate_marker(text) + except SyntaxError as e: + return cls.normalize_exception(e) + return False + + @staticmethod + def normalize_exception(exc): + """ + Given a SyntaxError from a marker evaluation, normalize the error + message: + - Remove indications of filename and line number. + - Replace platform-specific error messages with standard error + messages. + """ + subs = { + 'unexpected EOF while parsing': 'invalid syntax', + 'parenthesis is never closed': 'invalid syntax', + } + exc.filename = None + exc.lineno = None + exc.msg = subs.get(exc.msg, exc.msg) + return exc + + @classmethod + def and_test(cls, nodelist): + # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! + items = [ + cls.interpret(nodelist[i]) + for i in range(1, len(nodelist), 2) + ] + return functools.reduce(operator.and_, items) + + @classmethod + def test(cls, nodelist): + # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! 
        items = [
            cls.interpret(nodelist[i])
            for i in range(1, len(nodelist), 2)
        ]
        return functools.reduce(operator.or_, items)

    @classmethod
    def atom(cls, nodelist):
        # Only a parenthesized subexpression is allowed here; any other atom
        # (names/strings are handled in evaluate()) is unsupported.
        t = nodelist[1][0]
        if t == token.LPAR:
            if nodelist[2][0] == token.RPAR:
                raise SyntaxError("Empty parentheses")
            return cls.interpret(nodelist[2])
        msg = "Language feature not supported in environment markers"
        raise SyntaxError(msg)

    @classmethod
    def comparison(cls, nodelist):
        if len(nodelist) > 4:
            msg = "Chained comparison not allowed in environment markers"
            raise SyntaxError(msg)
        comp = nodelist[2][1]
        cop = comp[1]
        if comp[0] == token.NAME:
            # Two-token operators ('not in' / 'is not') arrive as a NAME pair.
            if len(nodelist[2]) == 3:
                if cop == 'not':
                    cop = 'not in'
                else:
                    cop = 'is not'
        try:
            cop = cls.get_op(cop)
        except KeyError:
            msg = repr(cop) + " operator not allowed in environment markers"
            raise SyntaxError(msg)
        return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))

    @classmethod
    def get_op(cls, op):
        # Keys are both grammar symbol ids (dispatch for interpret()) and
        # operator strings (callables used by comparison()).
        ops = {
            symbol.test: cls.test,
            symbol.and_test: cls.and_test,
            symbol.atom: cls.atom,
            symbol.comparison: cls.comparison,
            'not in': lambda x, y: x not in y,
            'in': lambda x, y: x in y,
            '==': operator.eq,
            '!=': operator.ne,
            '<': operator.lt,
            '>': operator.gt,
            '<=': operator.le,
            '>=': operator.ge,
        }
        # 'or_test' is not present in every grammar version -- add when available.
        if hasattr(symbol, 'or_test'):
            ops[symbol.or_test] = cls.test
        return ops[op]

    @classmethod
    def evaluate_marker(cls, text, extra=None):
        """
        Evaluate a PEP 426 environment marker on CPython 2.4+.
        Return a boolean indicating the marker result in this environment.
        Raise SyntaxError if marker is invalid.

        This implementation uses the 'parser' module, which is not implemented
        on Jython and has been superseded by the 'ast' module in Python 2.6
        and later.
        """
        # NOTE(review): the stdlib 'parser' module is removed in modern
        # Python; the _markerlib fallback below covers its absence -- confirm.
        return cls.interpret(parser.expr(text).totuple(1)[1])

    @classmethod
    def _markerlib_evaluate(cls, text):
        """
        Evaluate a PEP 426 environment marker using markerlib.
+ Return a boolean indicating the marker result in this environment. + Raise SyntaxError if marker is invalid. + """ + import _markerlib + # markerlib implements Metadata 1.2 (PEP 345) environment markers. + # Translate the variables to Metadata 2.0 (PEP 426). + env = _markerlib.default_environment() + for key in env.keys(): + new_key = key.replace('.', '_') + env[new_key] = env.pop(key) + try: + result = _markerlib.interpret(text, env) + except NameError as e: + raise SyntaxError(e.args[0]) + return result + + if 'parser' not in globals(): + # Fall back to less-complete _markerlib implementation if 'parser' module + # is not available. + evaluate_marker = _markerlib_evaluate + + @classmethod + def interpret(cls, nodelist): + while len(nodelist)==2: nodelist = nodelist[1] + try: + op = cls.get_op(nodelist[0]) + except KeyError: + raise SyntaxError("Comparison or logical expression expected") + return op(nodelist) + + @classmethod + def evaluate(cls, nodelist): + while len(nodelist)==2: nodelist = nodelist[1] + kind = nodelist[0] + name = nodelist[1] + if kind==token.NAME: + try: + op = cls.values[name] + except KeyError: + raise SyntaxError("Unknown name %r" % name) + return op() + if kind==token.STRING: + s = nodelist[1] + if not cls._safe_string(s): + raise SyntaxError( + "Only plain strings allowed in environment markers") + return s[1:-1] + msg = "Language feature not supported in environment markers" + raise SyntaxError(msg) + + @staticmethod + def _safe_string(cand): + return ( + cand[:1] in "'\"" and + not cand.startswith('"""') and + not cand.startswith("'''") and + '\\' not in cand + ) + +invalid_marker = MarkerEvaluation.is_invalid_marker +evaluate_marker = MarkerEvaluation.evaluate_marker + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = 
os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return io.BytesIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def has_metadata(self, name): + return self.egg_info and self._has(self._fn(self.egg_info, name)) + + if sys.version_info <= (3,): + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info, name)) + else: + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info, name)).decode("utf-8") + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self, resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) + + def metadata_listdir(self, name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info, name)) + return [] + + def run_script(self, script_name, namespace): + script = 'scripts/'+script_name + if not self.has_metadata(script): + raise ResolutionError("No script named %r" % script_name) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + source = open(script_filename).read() + code = compile(source, script_filename, 'exec') + exec(code, namespace, namespace) + 
else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text, script_filename,'exec') + exec(script_code, namespace, namespace) + + def _has(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + +register_loader_type(object, NullProvider) + + +class EggProvider(NullProvider): + """Provider based on a virtual filesystem""" + + def __init__(self, module): + NullProvider.__init__(self, module) + self._setup_prefix() + + def _setup_prefix(self): + # we assume here that our metadata may be nested inside a "basket" + # of multiple eggs; that's why we use module_path instead of .archive + path = self.module_path + old = None + while path!=old: + if path.lower().endswith('.egg'): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path + break + old = path + path, base = os.path.split(path) + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self, path): + return os.path.isdir(path) + + def _listdir(self, path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def 
_get(self, path): + with open(path, 'rb') as stream: + return stream.read() + +register_loader_type(type(None), DefaultProvider) + +if importlib_machinery is not None: + register_loader_type(importlib_machinery.SourceFileLoader, DefaultProvider) + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + _isdir = _has = lambda self, path: False + _get = lambda self, path: '' + _listdir = lambda self, path: [] + module_path = None + + def __init__(self): + pass + +empty_provider = EmptyProvider() + + +class ZipManifests(dict): + """ + zip manifest builder + """ + + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with ContextualZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): + """ + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. 
+ """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ContextualZipFile(zipfile.ZipFile): + """ + Supplement ZipFile class to support context manager for Python 2.6 + """ + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __new__(cls, *args, **kwargs): + """ + Construct a ZipFile or ContextualZipFile as appropriate + """ + if hasattr(zipfile.ZipFile, '__exit__'): + return zipfile.ZipFile(*args, **kwargs) + return super(ContextualZipFile, cls).__new__(cls) + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + _zip_manifests = MemoizedZipManifests() + + def __init__(self, module): + EggProvider.__init__(self, module) + self.zip_pre = self.loader.archive+os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.zip_pre) + ) + + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. 
+ # pseudo-fs path + fspath = self.zip_pre+zip_path + if fspath.startswith(self.egg_root+os.sep): + return fspath[len(self.egg_root)+1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.egg_root) + ) + + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + @staticmethod + def _get_date_and_size(zip_stat): + size = zip_stat.file_size + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done + timestamp = time.mktime(date_time) + return timestamp, size + + def _extract_resource(self, manager, zip_path): + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + # return the extracted directory name + return os.path.dirname(last) + + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + + if not WRITE_SUPPORT: + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') + try: + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if self._is_current(real_path, zip_path): + return real_path + + outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp, timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + 
if self._is_current(real_path, zip_path): + # the file became current since it was checked above, + # so proceed. + return real_path + # Windows, del old file and retry + elif os.name=='nt': + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + # report a user-friendly error + manager.extraction_error() + + return real_path + + def _is_current(self, file_path, zip_path): + """ + Return True if the file_path is current for this zip_path + """ + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + if not os.path.isfile(file_path): + return False + stat = os.stat(file_path) + if stat.st_size!=size or stat.st_mtime!=timestamp: + return False + # check that the contents match + zip_contents = self.loader.get_data(zip_path) + with open(file_path, 'rb') as f: + file_contents = f.read() + return zip_contents == file_contents + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self, fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self, fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) + + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, 
resource_name)) + +register_loader_type(zipimport.zipimporter, ZipProvider) + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. + """ + + def __init__(self, path): + self.path = path + + def has_metadata(self, name): + return name=='PKG-INFO' + + def get_metadata(self, name): + if name=='PKG-INFO': + with open(self.path,'rU') as f: + metadata = f.read() + return metadata + raise KeyError("No metadata except PKG-INFO is available") + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + +class PathMetadata(DefaultProvider): + """Metadata provider for egg directories + + Usage:: + + # Development eggs: + + egg_info = "/path/to/PackageName.egg-info" + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) + + # Unpacked egg directories: + + egg_path = "/path/to/PackageName-ver-pyver-etc.egg" + metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + """ + + def __init__(self, path, egg_info): + self.module_path = path + self.egg_info = egg_info + + +class EggMetadata(ZipProvider): + """Metadata provider for .egg files""" + + def __init__(self, importer): + """Create a metadata provider from a zipimporter""" + + self.zip_pre = importer.archive+os.sep + self.loader = importer + if importer.prefix: + self.module_path = os.path.join(importer.archive, importer.prefix) + else: + self.module_path = importer.archive + self._setup_prefix() + +_declare_state('dict', _distribution_finders = {}) + +def register_finder(importer_type, 
distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + +def find_eggs_in_zip(importer, path_item, only=False): + """ + Find eggs in zip files; possibly multiple nested eggs. + """ + if importer.archive.endswith('.whl'): + # wheels are not supported with this finder + # they don't have PKG-INFO metadata, and won't ever contain eggs + return + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + # don't yield nested distros + return + for subitem in metadata.resource_listdir('/'): + if subitem.endswith('.egg'): + subpath = os.path.join(path_item, subitem) + for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): + yield dist + +register_finder(zipimport.zipimporter, find_eggs_in_zip) + +def find_nothing(importer, path_item, only=False): + return () +register_finder(object, find_nothing) + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if os.path.isdir(path_item) and os.access(path_item, os.R_OK): + if path_item.lower().endswith('.egg'): + # unpacked egg + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item,'EGG-INFO') + ) + ) + else: + # scan for .egg 
and .egg-info in directory + for entry in os.listdir(path_item): + lower = entry.lower() + if lower.endswith('.egg-info') or lower.endswith('.dist-info'): + fullpath = os.path.join(path_item, entry) + if os.path.isdir(fullpath): + # egg-info directory, allow getting metadata + metadata = PathMetadata(path_item, fullpath) + else: + metadata = FileMetadata(fullpath) + yield Distribution.from_location( + path_item, entry, metadata, precedence=DEVELOP_DIST + ) + elif not only and lower.endswith('.egg'): + dists = find_distributions(os.path.join(path_item, entry)) + for dist in dists: + yield dist + elif not only and lower.endswith('.egg-link'): + with open(os.path.join(path_item, entry)) as entry_file: + entry_lines = entry_file.readlines() + for line in entry_lines: + if not line.strip(): + continue + path = os.path.join(path_item, line.rstrip()) + dists = find_distributions(path) + for item in dists: + yield item + break +register_finder(pkgutil.ImpImporter, find_on_path) + +if importlib_machinery is not None: + register_finder(importlib_machinery.FileFinder, find_on_path) + +_declare_state('dict', _namespace_handlers={}) +_declare_state('dict', _namespace_packages={}) + + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer, path_entry, moduleName, module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. 
+ """ + _namespace_handlers[importer_type] = namespace_handler + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + + importer = get_importer(path_item) + if importer is None: + return None + loader = importer.find_module(packageName) + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = types.ModuleType(packageName) + module.__path__ = [] + _set_parent_ns(packageName) + elif not hasattr(module,'__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer, path_item, packageName, module) + if subpath is not None: + path = module.__path__ + path.append(subpath) + loader.load_module(packageName) + for path_item in path: + if path_item not in module.__path__: + module.__path__.append(path_item) + return subpath + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + _imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path, parent = sys.path, None + if '.' 
in packageName: + parent = '.'.join(packageName.split('.')[:-1]) + declare_namespace(parent) + if parent not in _namespace_packages: + __import__(parent) + try: + path = sys.modules[parent].__path__ + except AttributeError: + raise TypeError("Not a package:", parent) + + # Track what packages are namespaces, so when new path items are added, + # they can be updated + _namespace_packages.setdefault(parent,[]).append(packageName) + _namespace_packages.setdefault(packageName,[]) + + for path_item in path: + # Ensure all the parent's path items are reflected in the child, + # if they apply + _handle_ns(packageName, path_item) + + finally: + _imp.release_lock() + +def fixup_namespace_packages(path_item, parent=None): + """Ensure that previously-declared namespace packages include path_item""" + _imp.acquire_lock() + try: + for package in _namespace_packages.get(parent,()): + subpath = _handle_ns(package, path_item) + if subpath: + fixup_namespace_packages(subpath, package) + finally: + _imp.release_lock() + +def file_ns_handler(importer, path_item, packageName, module): + """Compute an ns-package subpath for a filesystem or zipfile importer""" + + subpath = os.path.join(path_item, packageName.split('.')[-1]) + normalized = _normalize_cached(subpath) + for item in module.__path__: + if _normalize_cached(item)==normalized: + break + else: + # Only return the path if it's not already there + return subpath + +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) + +if importlib_machinery is not None: + register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) + + +def null_ns_handler(importer, path_item, packageName, module): + return None + +register_namespace_handler(object, null_ns_handler) + + +def normalize_path(filename): + """Normalize a file/dir name for comparison purposes""" + return os.path.normcase(os.path.realpath(filename)) + +def _normalize_cached(filename, 
_cache={}): + try: + return _cache[filename] + except KeyError: + _cache[filename] = result = normalize_path(filename) + return result + +def _set_parent_ns(packageName): + parts = packageName.split('.') + name = parts.pop() + if parts: + parent = '.'.join(parts) + setattr(sys.modules[parent], name, sys.modules[packageName]) + + +def yield_lines(strs): + """Yield non-empty/non-comment lines of a string or sequence""" + if isinstance(strs, string_types): + for s in strs.splitlines(): + s = s.strip() + # skip blank lines/comments + if s and not s.startswith('#'): + yield s + else: + for ss in strs: + for s in yield_lines(ss): + yield s + +# whitespace and comment +LINE_END = re.compile(r"\s*(#.*)?$").match +# line continuation +CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match +# Distribution or extra +DISTRO = re.compile(r"\s*((\w|[-.])+)").match +# ver. info +VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match +# comma between items +COMMA = re.compile(r"\s*,").match +OBRACKET = re.compile(r"\s*\[").match +CBRACKET = re.compile(r"\s*\]").match +MODULE = re.compile(r"\w+(\.\w+)*$").match +EGG_NAME = re.compile( + r""" + (?P[^-]+) ( + -(?P[^-]+) ( + -py(?P[^-]+) ( + -(?P.+) + )? + )? + )? 
+ """, + re.VERBOSE | re.IGNORECASE, +).match + + +class EntryPoint(object): + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, *args, **kwargs): + """ + Require packages for this EntryPoint, then resolve it. + """ + if not require or args or kwargs: + warnings.warn( + "Parameters to load are deprecated. Call .resolve and " + ".require separately.", + DeprecationWarning, + stacklevel=2, + ) + if require: + self.require(*args, **kwargs) + return self.resolve() + + def resolve(self): + """ + Resolve the entry point from its module and attrs. 
+ """ + module = __import__(self.module_name, fromlist=['__name__'], level=0) + try: + return functools.reduce(getattr, self.attrs, module) + except AttributeError as exc: + raise ImportError(str(exc)) + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + reqs = self.dist.requires(self.extras) + items = working_set.resolve(reqs, env, installer) + list(map(working_set.add, items)) + + pattern = re.compile( + r'\s*' + r'(?P.+?)\s*' + r'=\s*' + r'(?P[\w.]+)\s*' + r'(:\s*(?P[\w.]+))?\s*' + r'(?P\[.*\])?\s*$' + ) + + @classmethod + def parse(cls, src, dist=None): + """Parse a single entry point from string `src` + + Entry point syntax follows the form:: + + name = some.module:some.attr [extra1, extra2] + + The entry name and module name are required, but the ``:attrs`` and + ``[extras]`` parts are optional + """ + m = cls.pattern.match(src) + if not m: + msg = "EntryPoint must be in 'name=module:attrs [extras]' format" + raise ValueError(msg, src) + res = m.groupdict() + extras = cls._parse_extras(res['extras']) + attrs = res['attr'].split('.') if res['attr'] else () + return cls(res['name'], res['module'], attrs, extras, dist) + + @classmethod + def _parse_extras(cls, extras_spec): + if not extras_spec: + return () + req = Requirement.parse('x' + extras_spec) + if req.specs: + raise ValueError() + return req.extras + + @classmethod + def parse_group(cls, group, lines, dist=None): + """Parse an entry point group""" + if not MODULE(group): + raise ValueError("Invalid group name", group) + this = {} + for line in yield_lines(lines): + ep = cls.parse(line, dist) + if ep.name in this: + raise ValueError("Duplicate entry point", group, ep.name) + this[ep.name]=ep + return this + + @classmethod + def parse_map(cls, data, dist=None): + """Parse a map of entry point groups""" + if isinstance(data, dict): + data = data.items() + else: + data = split_sections(data) + maps = {} + 
for group, lines in data: + if group is None: + if not lines: + continue + raise ValueError("Entry points must be listed in groups") + group = group.strip() + if group in maps: + raise ValueError("Duplicate group name", group) + maps[group] = cls.parse_group(group, lines, dist) + return maps + + +def _remove_md5_fragment(location): + if not location: + return '' + parsed = urlparse(location) + if parsed[-1].startswith('md5='): + return urlunparse(parsed[:-1] + ('',)) + return location + + +class Distribution(object): + """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' + + def __init__(self, location=None, metadata=None, project_name=None, + version=None, py_version=PY_MAJOR, platform=None, + precedence=EGG_DIST): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + @classmethod + def from_location(cls, location, basename, metadata=None,**kw): + project_name, version, py_version, platform = [None]*4 + basename, ext = os.path.splitext(basename) + if ext.lower() in _distributionImpl: + # .dist-info gets much metadata differently + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name','ver','pyver','plat' + ) + cls = _distributionImpl[ext.lower()] + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + ) + + @property + def hashcmp(self): + return ( + self.parsed_version, + self.precedence, + self.key, + _remove_md5_fragment(self.location), + self.py_version or '', + self.platform or '', + ) + + def __hash__(self): + return hash(self.hashcmp) + + def __lt__(self, other): + return self.hashcmp < other.hashcmp + + def __le__(self, other): + return self.hashcmp <= other.hashcmp 
+ + def __gt__(self, other): + return self.hashcmp > other.hashcmp + + def __ge__(self, other): + return self.hashcmp >= other.hashcmp + + def __eq__(self, other): + if not isinstance(other, self.__class__): + # It's not a Distribution, so they are not equal + return False + return self.hashcmp == other.hashcmp + + def __ne__(self, other): + return not self == other + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. (i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + @property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + + @property + def parsed_version(self): + if not hasattr(self, "_parsed_version"): + self._parsed_version = parse_version(self.version) + + return self._parsed_version + + def _warn_legacy_version(self): + LV = packaging.version.LegacyVersion + is_legacy = isinstance(self._parsed_version, LV) + if not is_legacy: + return + + # While an empty version is technically a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if not self.version: + return + + tmpl = textwrap.dedent(""" + '{project_name} ({version})' is being parsed as a legacy, + non PEP 440, + version. You may find odd behavior and sort order. + In particular it will be sorted as less than 0.0. It + is recommended to migrate to PEP 440 compatible + versions. 
+ """).strip().replace('\n', ' ') + + warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + + @property + def version(self): + try: + return self._version + except AttributeError: + for line in self._get_metadata(self.PKG_INFO): + if line.lower().startswith('version:'): + self._version = safe_version(line.split(':',1)[1].strip()) + return self._version + else: + tmpl = "Missing 'Version:' header and/or %s file" + raise ValueError(tmpl % self.PKG_INFO, self) + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + dm = self.__dep_map = {None: []} + for name in 'requires.txt', 'depends.txt': + for extra, reqs in split_sections(self._get_metadata(name)): + if extra: + if ':' in extra: + extra, marker = extra.split(':', 1) + if invalid_marker(marker): + # XXX warn + reqs=[] + elif not evaluate_marker(marker): + reqs=[] + extra = safe_extra(extra) or None + dm.setdefault(extra,[]).extend(parse_requirements(reqs)) + return dm + + def requires(self, extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None, ())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) + return deps + + def _get_metadata(self, name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def activate(self, path=None): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: + path = sys.path + self.insert_on(path) + if path is sys.path: + fixup_namespace_packages(self.location) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: + declare_namespace(pkg) + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or 
PY_MAJOR + ) + + if self.platform: + filename += '-' + self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self, self.location) + else: + return str(self) + + def __str__(self): + try: + version = getattr(self, 'version', None) + except ValueError: + version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name, version) + + def __getattr__(self, attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError(attr) + return getattr(self._provider, attr) + + @classmethod + def from_filename(cls, filename, metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + if isinstance(self.parsed_version, packaging.version.Version): + spec = "%s==%s" % (self.project_name, self.parsed_version) + else: + spec = "%s===%s" % (self.project_name, self.parsed_version) + + return Requirement.parse(spec) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group, name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group, name),)) + return ep.load() + + def get_entry_map(self, group=None): + """Return the entry point map for `group`, or the full entry map""" + try: + ep_map = self._ep_map + except AttributeError: + ep_map = self._ep_map = EntryPoint.parse_map( + self._get_metadata('entry_points.txt'), self + ) + if group is not None: + return ep_map.get(group,{}) + return ep_map + + def get_entry_info(self, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return self.get_entry_map(group).get(name) + + def insert_on(self, path, loc = None): + """Insert self.location in path before its nearest parent directory""" + + loc = 
loc or self.location + if not loc: + return + + nloc = _normalize_cached(loc) + bdir = os.path.dirname(nloc) + npath= [(p and _normalize_cached(p) or p) for p in path] + + for p, item in enumerate(npath): + if item == nloc: + break + elif item == bdir and self.precedence == EGG_DIST: + # if it's an .egg, give it precedence over its directory + if path is sys.path: + self.check_version_conflict() + path.insert(p, loc) + npath.insert(p, nloc) + break + else: + if path is sys.path: + self.check_version_conflict() + path.append(loc) + return + + # p is the spot where we found or inserted loc; now remove duplicates + while True: + try: + np = npath.index(nloc, p+1) + except ValueError: + break + else: + del npath[np], path[np] + # ha! + p = np + + return + + def check_version_conflict(self): + if self.key == 'setuptools': + # ignore the inevitable setuptools self-conflicts :( + return + + nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) + loc = normalize_path(self.location) + for modname in self._get_metadata('top_level.txt'): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages): + continue + if modname in ('pkg_resources', 'setuptools', 'site'): + continue + fn = getattr(sys.modules[modname], '__file__', None) + if fn and (normalize_path(fn).startswith(loc) or + fn.startswith(self.location)): + continue + issue_warning( + "Module %s was already imported from %s, but %s is being added" + " to sys.path" % (modname, fn, self.location), + ) + + def has_version(self): + try: + self.version + except ValueError: + issue_warning("Unbuilt egg for " + repr(self)) + return False + return True + + def clone(self,**kw): + """Copy this distribution, substituting in any changed keyword args""" + names = 'project_name version py_version platform location precedence' + for attr in names.split(): + kw.setdefault(attr, getattr(self, attr, None)) + kw.setdefault('metadata', self._provider) + return self.__class__(**kw) + + @property + 
def extras(self): + return [dep for dep in self._dep_map if dep] + + +class DistInfoDistribution(Distribution): + """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" + PKG_INFO = 'METADATA' + EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") + + @property + def _parsed_pkg_info(self): + """Parse and cache metadata""" + try: + return self._pkg_info + except AttributeError: + metadata = self.get_metadata(self.PKG_INFO) + self._pkg_info = email.parser.Parser().parsestr(metadata) + return self._pkg_info + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._compute_dependencies() + return self.__dep_map + + def _preparse_requirement(self, requires_dist): + """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz') + Split environment marker, add == prefix to version specifiers as + necessary, and remove parenthesis. + """ + parts = requires_dist.split(';', 1) + [''] + distvers = parts[0].strip() + mark = parts[1].strip() + distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers) + distvers = distvers.replace('(', '').replace(')', '') + return (distvers, mark) + + def _compute_dependencies(self): + """Recompute this distribution's dependencies.""" + from _markerlib import compile as compile_marker + dm = self.__dep_map = {None: []} + + reqs = [] + # Including any condition expressions + for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: + distvers, mark = self._preparse_requirement(req) + parsed = next(parse_requirements(distvers)) + parsed.marker_fn = compile_marker(mark) + reqs.append(parsed) + + def reqs_for_extra(extra): + for req in reqs: + if req.marker_fn(override={'extra':extra}): + yield req + + common = frozenset(reqs_for_extra(None)) + dm[None].extend(common) + + for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: + extra = safe_extra(extra.strip()) + dm[extra] = list(frozenset(reqs_for_extra(extra)) - common) + + return dm + + +_distributionImpl = { + 
'.egg': Distribution, + '.egg-info': Distribution, + '.dist-info': DistInfoDistribution, + } + + +def issue_warning(*args,**kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + warnings.warn(stacklevel=level + 1, *args, **kw) + + +class RequirementParseError(ValueError): + def __str__(self): + return ' '.join(self.args) + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be a string, or a (possibly-nested) iterable thereof. + """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + def scan_list(ITEM, TERMINATOR, line, p, groups, item_name): + + items = [] + + while not TERMINATOR(line, p): + if CONTINUE(line, p): + try: + line = next(lines) + p = 0 + except StopIteration: + msg = "\\ must not appear on the last nonblank line" + raise RequirementParseError(msg) + + match = ITEM(line, p) + if not match: + msg = "Expected " + item_name + " in" + raise RequirementParseError(msg, line, "at", line[p:]) + + items.append(match.group(*groups)) + p = match.end() + + match = COMMA(line, p) + if match: + # skip the comma + p = match.end() + elif not TERMINATOR(line, p): + msg = "Expected ',' or end-of-list in" + raise RequirementParseError(msg, line, "at", line[p:]) + + match = TERMINATOR(line, p) + # skip the terminator, if any + if match: + p = match.end() + return line, p, items + + for line in lines: + match = DISTRO(line) + if not match: + raise RequirementParseError("Missing distribution spec", line) + project_name = match.group(1) + p = match.end() + extras = [] + + match = OBRACKET(line, p) + if match: + p = match.end() + line, p, extras = scan_list( + DISTRO, CBRACKET, line, p, (1,), "'extra' name" + ) + + line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2), + 
"version spec") + specs = [(op, val) for op, val in specs] + yield Requirement(project_name, specs, extras) + + +class Requirement: + def __init__(self, project_name, specs, extras): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + self.unsafe_name, project_name = project_name, safe_name(project_name) + self.project_name, self.key = project_name, project_name.lower() + self.specifier = packaging.specifiers.SpecifierSet( + ",".join(["".join([x, y]) for x, y in specs]) + ) + self.specs = specs + self.extras = tuple(map(safe_extra, extras)) + self.hashCmp = ( + self.key, + self.specifier, + frozenset(self.extras), + ) + self.__hash = hash(self.hashCmp) + + def __str__(self): + extras = ','.join(self.extras) + if extras: + extras = '[%s]' % extras + return '%s%s%s' % (self.project_name, extras, self.specifier) + + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) + + def __ne__(self, other): + return not self == other + + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: + return False + + item = item.version + + # Allow prereleases always in order to match the previous behavior of + # this method. In the future this should be smarter and follow PEP 440 + # more accurately. 
+ return self.specifier.contains(item, prereleases=True) + + def __hash__(self): + return self.__hash + + def __repr__(self): return "Requirement.parse(%r)" % str(self) + + @staticmethod + def parse(s): + reqs = list(parse_requirements(s)) + if reqs: + if len(reqs) == 1: + return reqs[0] + raise ValueError("Expected only one requirement", s) + raise ValueError("No requirements found", s) + + +def _get_mro(cls): + """Get an mro for a type or classic class""" + if not isinstance(cls, type): + class cls(cls, object): pass + return cls.__mro__[1:] + return cls.__mro__ + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + for t in _get_mro(getattr(ob, '__class__', type(ob))): + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + + +def _bypass_ensure_directory(path): + """Sandbox-bypassing version of ensure_directory()""" + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(path) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, 0o755) + + +def split_sections(s): + """Split a string or iterable thereof into (section, content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. 
+ """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + +def _mkstemp(*args,**kw): + old_open = os.open + try: + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args,**kw) + finally: + # and then put it back + os.open = old_open + + +# Silence the PEP440Warning by default, so that end users don't get hit by it +# randomly just because they use pkg_resources. We want to append the rule +# because we want earlier uses of filterwarnings to take precedence over this +# one. +warnings.filterwarnings("ignore", category=PEP440Warning, append=True) + + +# from jaraco.functools 1.3 +def _call_aside(f, *args, **kwargs): + f(*args, **kwargs) + return f + + +@_call_aside +def _initialize(g=globals()): + "Set up global resource manager (deliberately not state-saved)" + manager = ResourceManager() + g['_manager'] = manager + for name in dir(manager): + if not name.startswith('_'): + g[name] = getattr(manager, name) + + +@_call_aside +def _initialize_master_working_set(): + """ + Prepare the master working set and make the ``require()`` + API available. + + This function has explicit effects on the global state + of pkg_resources. It is intended to be invoked once at + the initialization of this module. + + Invocation by other packages is unsupported and done + at their own risk. 
+ """ + working_set = WorkingSet._build_master() + _declare_state('object', working_set=working_set) + + require = working_set.require + iter_entry_points = working_set.iter_entry_points + add_activation_listener = working_set.subscribe + run_script = working_set.run_script + # backward compatibility + run_main = run_script + # Activate all distributions already on sys.path, and ensure that + # all distributions added to the working set in the future (e.g. by + # calling ``require()``) will get activated as well. + add_activation_listener(lambda dist: dist.activate()) + working_set.entries=[] + # match order + list(map(working_set.add_entry, sys.path)) + globals().update(locals()) diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/__init__.py b/pymode/libs/pkg_resources/_vendor/__init__.py similarity index 100% rename from pymode/libs/pylama/lint/pylama_pylint/logilab/__init__.py rename to pymode/libs/pkg_resources/_vendor/__init__.py diff --git a/pymode/libs/pkg_resources/_vendor/packaging/__about__.py b/pymode/libs/pkg_resources/_vendor/packaging/__about__.py new file mode 100644 index 00000000..eadb794e --- /dev/null +++ b/pymode/libs/pkg_resources/_vendor/packaging/__about__.py @@ -0,0 +1,31 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "15.3" + +__author__ = "Donald Stufft" +__email__ = "donald@stufft.io" + +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2014 %s" % __author__ diff --git a/pymode/libs/pkg_resources/_vendor/packaging/__init__.py b/pymode/libs/pkg_resources/_vendor/packaging/__init__.py new file mode 100644 index 00000000..c39a8eab --- /dev/null +++ b/pymode/libs/pkg_resources/_vendor/packaging/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/pymode/libs/pkg_resources/_vendor/packaging/_compat.py b/pymode/libs/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 00000000..5c396cea --- /dev/null +++ b/pymode/libs/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,40 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/pymode/libs/pkg_resources/_vendor/packaging/_structures.py b/pymode/libs/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 00000000..0ae9bb52 --- /dev/null +++ b/pymode/libs/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,78 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + +NegativeInfinity = NegativeInfinity() diff --git a/pymode/libs/pkg_resources/_vendor/packaging/specifiers.py b/pymode/libs/pkg_resources/_vendor/packaging/specifiers.py new file mode 100644 index 00000000..891664f0 --- /dev/null +++ b/pymode/libs/pkg_resources/_vendor/packaging/specifiers.py @@ -0,0 +1,784 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. 
+ """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format( + self.__class__.__name__, + str(self), + pre, + ) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + return self._spec[0] + + @property + def version(self): + return self._spec[1] + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. 
+ if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self.operator)(item, self.version) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if (parsed_version.is_prerelease + and not (prereleases or self.prereleases)): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the begining. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. 
+ if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex = re.compile( + r""" + ^ + \s* + (?P(==|!=|<=|>=|<|>)) + \s* + (?P + [^\s]* # We just match everything, except for whitespace since this + # is a "legacy" specifier and the version string can be just + # about anything. + ) + \s* + $ + """, + re.VERBOSE | re.IGNORECASE, + ) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex = re.compile( + r""" + ^ + \s* + (?P(~=|==|!=|<=|>=|<|>|===)) + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. 
This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. 
This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") + and not x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return (self._get_operator(">=")(prospective, spec) + and self._get_operator("==")(prospective, prefix)) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + prospective = prospective[:len(spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + spec, prospective = _pad_version(spec, prospective) + else: + # Convert our spec string into a Version + spec = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
+ if not spec.local: + prospective = Version(prospective.public) + + return prospective == spec + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + return prospective <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. 
+ if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is techincally greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. 
+ if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split):]) + right_split.append(left[len(right_split):]) + + # Insert our padding + left_split.insert( + 1, + ["0"] * max(0, len(right_split[0]) - len(left_split[0])), + ) + right_split.insert( + 1, + ["0"] * max(0, len(left_split[0]) - len(right_split[0])), + ) + + return ( + list(itertools.chain(*left_split)), + list(itertools.chain(*right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. 
+ self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." + ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + return len(self._specs) + + def __iter__(self): + return iter(self._specs) + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. 
+ if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. 
+ return all( + s.contains(item, prereleases=prereleases) + for s in self._specs + ) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. + else: + filtered = [] + found_prereleases = [] + + for item in iterable: + # Ensure that we some kind of Version class for this item. 
+ if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/pymode/libs/pkg_resources/_vendor/packaging/version.py b/pymode/libs/pkg_resources/_vendor/packaging/version.py new file mode 100644 index 00000000..4ba574b9 --- /dev/null +++ b/pymode/libs/pkg_resources/_vendor/packaging/version.py @@ -0,0 +1,403 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity + + +__all__ = [ + "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" +] + + +_Version = collections.namedtuple( + "_Version", + ["epoch", "release", "dev", "pre", "post", "local"], +) + + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. + """ + + +class _BaseVersion(object): + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + 
def is_postrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
                                              # pre-release
    +            [-_\.]?
    +            (?P(a|b|c|rc|alpha|beta|pre|preview))
    +            [-_\.]?
    +            (?P[0-9]+)?
    +        )?
    +        (?P                                         # post release
    +            (?:-(?P[0-9]+))
    +            |
    +            (?:
    +                [-_\.]?
    +                (?Ppost|rev|r)
    +                [-_\.]?
    +                (?P[0-9]+)?
    +            )
    +        )?
    +        (?P                                          # dev release
    +            [-_\.]?
    +            (?Pdev)
    +            [-_\.]?
    +            (?P[0-9]+)?
    +        )?
    +    )
    +    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
    +"""
    +
    +
    +class Version(_BaseVersion):
    +
    +    _regex = re.compile(
    +        r"^\s*" + VERSION_PATTERN + r"\s*$",
    +        re.VERBOSE | re.IGNORECASE,
    +    )
    +
    +    def __init__(self, version):
    +        # Validate the version and parse it into pieces
    +        match = self._regex.search(version)
    +        if not match:
    +            raise InvalidVersion("Invalid version: '{0}'".format(version))
    +
    +        # Store the parsed out pieces of the version
    +        self._version = _Version(
    +            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
    +            release=tuple(int(i) for i in match.group("release").split(".")),
    +            pre=_parse_letter_version(
    +                match.group("pre_l"),
    +                match.group("pre_n"),
    +            ),
    +            post=_parse_letter_version(
    +                match.group("post_l"),
    +                match.group("post_n1") or match.group("post_n2"),
    +            ),
    +            dev=_parse_letter_version(
    +                match.group("dev_l"),
    +                match.group("dev_n"),
    +            ),
    +            local=_parse_local_version(match.group("local")),
    +        )
    +
    +        # Generate a key which will be used for sorting
    +        self._key = _cmpkey(
    +            self._version.epoch,
    +            self._version.release,
    +            self._version.pre,
    +            self._version.post,
    +            self._version.dev,
    +            self._version.local,
    +        )
    +
    +    def __repr__(self):
    +        return "".format(repr(str(self)))
    +
    +    def __str__(self):
    +        parts = []
    +
    +        # Epoch
    +        if self._version.epoch != 0:
    +            parts.append("{0}!".format(self._version.epoch))
    +
    +        # Release segment
    +        parts.append(".".join(str(x) for x in self._version.release))
    +
    +        # Pre-release
    +        if self._version.pre is not None:
    +            parts.append("".join(str(x) for x in self._version.pre))
    +
    +        # Post-release
    +        if self._version.post is not None:
    +            parts.append(".post{0}".format(self._version.post[1]))
    +
    +        # Development release
    +        if self._version.dev is not None:
    +            parts.append(".dev{0}".format(self._version.dev[1]))
    +
    +        # Local version segment
    +        if self._version.local is not None:
    +            parts.append(
    +                "+{0}".format(".".join(str(x) for x in self._version.local))
    +            )
    +
    +        return "".join(parts)
    +
    +    @property
    +    def public(self):
    +        return str(self).split("+", 1)[0]
    +
    +    @property
    +    def base_version(self):
    +        parts = []
    +
    +        # Epoch
    +        if self._version.epoch != 0:
    +            parts.append("{0}!".format(self._version.epoch))
    +
    +        # Release segment
    +        parts.append(".".join(str(x) for x in self._version.release))
    +
    +        return "".join(parts)
    +
    +    @property
    +    def local(self):
    +        version_string = str(self)
    +        if "+" in version_string:
    +            return version_string.split("+", 1)[1]
    +
    +    @property
    +    def is_prerelease(self):
    +        return bool(self._version.dev or self._version.pre)
    +
    +    @property
    +    def is_postrelease(self):
    +        return bool(self._version.post)
    +
    +
    +def _parse_letter_version(letter, number):
    +    if letter:
    +        # We consider there to be an implicit 0 in a pre-release if there is
    +        # not a numeral associated with it.
    +        if number is None:
    +            number = 0
    +
    +        # We normalize any letters to their lower case form
    +        letter = letter.lower()
    +
    +        # We consider some words to be alternate spellings of other words and
    +        # in those cases we want to normalize the spellings to our preferred
    +        # spelling.
    +        if letter == "alpha":
    +            letter = "a"
    +        elif letter == "beta":
    +            letter = "b"
    +        elif letter in ["c", "pre", "preview"]:
    +            letter = "rc"
    +        elif letter in ["rev", "r"]:
    +            letter = "post"
    +
    +        return letter, int(number)
    +    if not letter and number:
    +        # We assume if we are given a number, but we are not given a letter
    +        # then this is using the implicit post release syntax (e.g. 1.0-1)
    +        letter = "post"
    +
    +        return letter, int(number)
    +
    +
    +_local_version_seperators = re.compile(r"[\._-]")
    +
    +
    +def _parse_local_version(local):
    +    """
    +    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    +    """
    +    if local is not None:
    +        return tuple(
    +            part.lower() if not part.isdigit() else int(part)
    +            for part in _local_version_seperators.split(local)
    +        )
    +
    +
    +def _cmpkey(epoch, release, pre, post, dev, local):
    +    # When we compare a release version, we want to compare it with all of the
    +    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    +    # leading zeros until we come to something non zero, then take the rest
    +    # re-reverse it back into the correct order and make it a tuple and use
    +    # that for our sorting key.
    +    release = tuple(
    +        reversed(list(
    +            itertools.dropwhile(
    +                lambda x: x == 0,
    +                reversed(release),
    +            )
    +        ))
    +    )
    +
    +    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    +    # We'll do this by abusing the pre segment, but we _only_ want to do this
    +    # if there is not a pre or a post segment. If we have one of those then
    +    # the normal sorting rules will handle this case correctly.
    +    if pre is None and post is None and dev is not None:
    +        pre = -Infinity
    +    # Versions without a pre-release (except as noted above) should sort after
    +    # those with one.
    +    elif pre is None:
    +        pre = Infinity
    +
    +    # Versions without a post segment should sort before those with one.
    +    if post is None:
    +        post = -Infinity
    +
    +    # Versions without a development segment should sort after those with one.
    +    if dev is None:
    +        dev = Infinity
    +
    +    if local is None:
    +        # Versions without a local segment should sort before those with one.
    +        local = -Infinity
    +    else:
    +        # Versions with a local segment need that segment parsed to implement
    +        # the sorting rules in PEP440.
    +        # - Alpha numeric segments sort before numeric segments
    +        # - Alpha numeric segments sort lexicographically
    +        # - Numeric segments sort numerically
    +        # - Shorter versions sort before longer versions when the prefixes
    +        #   match exactly
    +        local = tuple(
    +            (i, "") if isinstance(i, int) else (-Infinity, i)
    +            for i in local
    +        )
    +
    +    return epoch, release, pre, post, dev, local
    diff --git a/pymode/libs/pyflakes/__init__.py b/pymode/libs/pyflakes/__init__.py
    new file mode 100644
    index 00000000..1f047803
    --- /dev/null
    +++ b/pymode/libs/pyflakes/__init__.py
    @@ -0,0 +1 @@
    +__version__ = '0.9.2'
    diff --git a/pymode/libs/pyflakes/__main__.py b/pymode/libs/pyflakes/__main__.py
    new file mode 100644
    index 00000000..a69e6891
    --- /dev/null
    +++ b/pymode/libs/pyflakes/__main__.py
    @@ -0,0 +1,5 @@
    +from pyflakes.api import main
    +
    +# python -m pyflakes (with Python >= 2.7)
    +if __name__ == '__main__':
    +    main(prog='pyflakes')
    diff --git a/pymode/libs/pyflakes/api.py b/pymode/libs/pyflakes/api.py
    new file mode 100644
    index 00000000..3bc23306
    --- /dev/null
    +++ b/pymode/libs/pyflakes/api.py
    @@ -0,0 +1,175 @@
    +"""
    +API for the command-line I{pyflakes} tool.
    +"""
    +from __future__ import with_statement
    +
    +import sys
    +import os
    +import _ast
    +
    +from pyflakes import checker, __version__
    +from pyflakes import reporter as modReporter
    +
    +__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
    +
    +
    +def check(codeString, filename, reporter=None):
    +    """
    +    Check the Python source given by C{codeString} for flakes.
    +
    +    @param codeString: The Python source to check.
    +    @type codeString: C{str}
    +
    +    @param filename: The name of the file the source came from, used to report
    +        errors.
    +    @type filename: C{str}
    +
    +    @param reporter: A L{Reporter} instance, where errors and warnings will be
    +        reported.
    +
    +    @return: The number of warnings emitted.
    +    @rtype: C{int}
    +    """
    +    if reporter is None:
    +        reporter = modReporter._makeDefaultReporter()
    +    # First, compile into an AST and handle syntax errors.
    +    try:
    +        tree = compile(codeString, filename, "exec", _ast.PyCF_ONLY_AST)
    +    except SyntaxError:
    +        value = sys.exc_info()[1]
    +        msg = value.args[0]
    +
    +        (lineno, offset, text) = value.lineno, value.offset, value.text
    +
    +        # If there's an encoding problem with the file, the text is None.
    +        if text is None:
    +            # Avoid using msg, since for the only known case, it contains a
    +            # bogus message that claims the encoding the file declared was
    +            # unknown.
    +            reporter.unexpectedError(filename, 'problem decoding source')
    +        else:
    +            reporter.syntaxError(filename, msg, lineno, offset, text)
    +        return 1
    +    except Exception:
    +        reporter.unexpectedError(filename, 'problem decoding source')
    +        return 1
    +    # Okay, it's syntactically valid.  Now check it.
    +    w = checker.Checker(tree, filename)
    +    w.messages.sort(key=lambda m: m.lineno)
    +    for warning in w.messages:
    +        reporter.flake(warning)
    +    return len(w.messages)
    +
    +
    +def checkPath(filename, reporter=None):
    +    """
    +    Check the given path, printing out any warnings detected.
    +
    +    @param reporter: A L{Reporter} instance, where errors and warnings will be
    +        reported.
    +
    +    @return: the number of warnings printed
    +    """
    +    if reporter is None:
    +        reporter = modReporter._makeDefaultReporter()
    +    try:
    +        # in Python 2.6, compile() will choke on \r\n line endings. In later
    +        # versions of python it's smarter, and we want binary mode to give
    +        # compile() the best opportunity to do the right thing WRT text
    +        # encodings.
    +        if sys.version_info < (2, 7):
    +            mode = 'rU'
    +        else:
    +            mode = 'rb'
    +
    +        with open(filename, mode) as f:
    +            codestr = f.read()
    +        if sys.version_info < (2, 7):
    +            codestr += '\n'     # Work around for Python <= 2.6
    +    except UnicodeError:
    +        reporter.unexpectedError(filename, 'problem decoding source')
    +        return 1
    +    except IOError:
    +        msg = sys.exc_info()[1]
    +        reporter.unexpectedError(filename, msg.args[1])
    +        return 1
    +    return check(codestr, filename, reporter)
    +
    +
    +def iterSourceCode(paths):
    +    """
    +    Iterate over all Python source files in C{paths}.
    +
    +    @param paths: A list of paths.  Directories will be recursed into and
    +        any .py files found will be yielded.  Any non-directories will be
    +        yielded as-is.
    +    """
    +    for path in paths:
    +        if os.path.isdir(path):
    +            for dirpath, dirnames, filenames in os.walk(path):
    +                for filename in filenames:
    +                    if filename.endswith('.py'):
    +                        yield os.path.join(dirpath, filename)
    +        else:
    +            yield path
    +
    +
    +def checkRecursive(paths, reporter):
    +    """
    +    Recursively check all source files in C{paths}.
    +
    +    @param paths: A list of paths to Python source files and directories
    +        containing Python source files.
    +    @param reporter: A L{Reporter} where all of the warnings and errors
    +        will be reported to.
    +    @return: The number of warnings found.
    +    """
    +    warnings = 0
    +    for sourcePath in iterSourceCode(paths):
    +        warnings += checkPath(sourcePath, reporter)
    +    return warnings
    +
    +
    +def _exitOnSignal(sigName, message):
    +    """Handles a signal with sys.exit.
    +
    +    Some of these signals (SIGPIPE, for example) don't exist or are invalid on
    +    Windows. So, ignore errors that might arise.
    +    """
    +    import signal
    +
    +    try:
    +        sigNumber = getattr(signal, sigName)
    +    except AttributeError:
    +        # the signal constants defined in the signal module are defined by
    +        # whether the C library supports them or not. So, SIGPIPE might not
    +        # even be defined.
    +        return
    +
    +    def handler(sig, f):
    +        sys.exit(message)
    +
    +    try:
    +        signal.signal(sigNumber, handler)
    +    except ValueError:
    +        # It's also possible the signal is defined, but then it's invalid. In
    +        # this case, signal.signal raises ValueError.
    +        pass
    +
    +
    +def main(prog=None):
    +    """Entry point for the script "pyflakes"."""
    +    import optparse
    +
    +    # Handle "Keyboard Interrupt" and "Broken pipe" gracefully
    +    _exitOnSignal('SIGINT', '... stopped')
    +    _exitOnSignal('SIGPIPE', 1)
    +
    +    parser = optparse.OptionParser(prog=prog, version=__version__)
    +    (__, args) = parser.parse_args()
    +    reporter = modReporter._makeDefaultReporter()
    +    if args:
    +        warnings = checkRecursive(args, reporter)
    +    else:
    +        warnings = check(sys.stdin.read(), '', reporter)
    +    raise SystemExit(warnings > 0)
    diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py b/pymode/libs/pyflakes/checker.py
    similarity index 92%
    rename from pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py
    rename to pymode/libs/pyflakes/checker.py
    index 70558324..e6e19427 100644
    --- a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/checker.py
    +++ b/pymode/libs/pyflakes/checker.py
    @@ -448,6 +448,10 @@ def addBinding(self, node, value):
                 elif isinstance(existing, Importation) and value.redefines(existing):
                     existing.redefined.append(node)
     
    +        if value.name in self.scope:
    +            # then assume the rebound name is used as a global or within a loop
    +            value.used = self.scope[value.name].used
    +
             self.scope[value.name] = value
     
         def getNodeHandler(self, node_class):
    @@ -471,7 +475,7 @@ def handleNodeLoad(self, node):
                 return
     
             scopes = [scope for scope in self.scopeStack[:-1]
    -                  if isinstance(scope, (FunctionScope, ModuleScope))]
    +                  if isinstance(scope, (FunctionScope, ModuleScope, GeneratorScope))]
             if isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scopeStack[-2]:
                 scopes.append(self.scopeStack[-2])
     
    @@ -526,14 +530,30 @@ def handleNodeStore(self, node):
                 binding = ExportBinding(name, node.parent, self.scope)
             else:
                 binding = Assignment(name, node)
    -        if name in self.scope:
    -            binding.used = self.scope[name].used
             self.addBinding(node, binding)
     
         def handleNodeDelete(self, node):
    +
    +        def on_conditional_branch():
    +            """
    +            Return `True` if node is part of a conditional body.
    +            """
    +            current = getattr(node, 'parent', None)
    +            while current:
    +                if isinstance(current, (ast.If, ast.While, ast.IfExp)):
    +                    return True
    +                current = getattr(current, 'parent', None)
    +            return False
    +
             name = getNodeName(node)
             if not name:
                 return
    +
    +        if on_conditional_branch():
    +            # We can not predict if this conditional branch is going to
    +            # be executed.
    +            return
    +
             if isinstance(self.scope, FunctionScope) and name in self.scope.globals:
                 self.scope.globals.remove(name)
             else:
    @@ -630,8 +650,9 @@ def ignore(self, node):
             pass
     
         # "stmt" type nodes
    -    DELETE = PRINT = FOR = WHILE = IF = WITH = WITHITEM = RAISE = \
    -        TRYFINALLY = ASSERT = EXEC = EXPR = ASSIGN = handleChildren
    +    DELETE = PRINT = FOR = ASYNCFOR = WHILE = IF = WITH = WITHITEM = \
    +        ASYNCWITH = ASYNCWITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = \
    +        EXPR = ASSIGN = handleChildren
     
         CONTINUE = BREAK = PASS = ignore
     
    @@ -654,14 +675,36 @@ def ignore(self, node):
             EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore
     
         # additional node types
    -    LISTCOMP = COMPREHENSION = KEYWORD = handleChildren
    +    COMPREHENSION = KEYWORD = handleChildren
     
         def GLOBAL(self, node):
             """
             Keep track of globals declarations.
             """
    -        if isinstance(self.scope, FunctionScope):
    -            self.scope.globals.update(node.names)
    +        # In doctests, the global scope is an anonymous function at index 1.
    +        global_scope_index = 1 if self.withDoctest else 0
    +        global_scope = self.scopeStack[global_scope_index]
    +
    +        # Ignore 'global' statement in global scope.
    +        if self.scope is not global_scope:
    +
    +            # One 'global' statement can bind multiple (comma-delimited) names.
    +            for node_name in node.names:
    +                node_value = Assignment(node_name, node)
    +
    +                # Remove UndefinedName messages already reported for this name.
    +                self.messages = [
    +                    m for m in self.messages if not
    +                    isinstance(m, messages.UndefinedName) and not
    +                    m.message_args[0] == node_name]
    +
    +                # Bind name to global scope if it doesn't exist already.
    +                global_scope.setdefault(node_name, node_value)
    +
    +                # Bind name to non-global scopes, but as already "used".
    +                node_value.used = (global_scope, node)
    +                for scope in self.scopeStack[global_scope_index + 1:]:
    +                    scope[node_name] = node_value
     
         NONLOCAL = GLOBAL
     
    @@ -670,6 +713,8 @@ def GENERATOREXP(self, node):
             self.handleChildren(node)
             self.popScope()
     
    +    LISTCOMP = handleChildren if PY2 else GENERATOREXP
    +
         DICTCOMP = SETCOMP = GENERATOREXP
     
         def NAME(self, node):
    @@ -693,6 +738,10 @@ def NAME(self, node):
                 raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
     
         def RETURN(self, node):
    +        if isinstance(self.scope, ClassScope):
    +            self.report(messages.ReturnOutsideFunction, node)
    +            return
    +
             if (
                 node.value and
                 hasattr(self.scope, 'returnValue') and
    @@ -705,7 +754,7 @@ def YIELD(self, node):
             self.scope.isGenerator = True
             self.handleNode(node.value, node)
     
    -    YIELDFROM = YIELD
    +    AWAIT = YIELDFROM = YIELD
     
         def FUNCTIONDEF(self, node):
             for deco in node.decorator_list:
    @@ -715,6 +764,8 @@ def FUNCTIONDEF(self, node):
             if self.withDoctest:
                 self.deferFunction(lambda: self.handleDoctests(node))
     
    +    ASYNCFUNCTIONDEF = FUNCTIONDEF
    +
         def LAMBDA(self, node):
             args = []
             annotations = []
    diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py b/pymode/libs/pyflakes/messages.py
    similarity index 94%
    rename from pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py
    rename to pymode/libs/pyflakes/messages.py
    index 1f799ec5..8899b7b0 100644
    --- a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/messages.py
    +++ b/pymode/libs/pyflakes/messages.py
    @@ -100,14 +100,6 @@ def __init__(self, filename, loc, name):
             self.message_args = (name,)
     
     
    -class Redefined(Message):
    -    message = 'redefinition of %r from line %r'
    -
    -    def __init__(self, filename, loc, name, orig_loc):
    -        Message.__init__(self, filename, loc)
    -        self.message_args = (name, orig_loc.lineno)
    -
    -
     class LateFutureImport(Message):
         message = 'future import(s) %r after other statements'
     
    @@ -133,3 +125,10 @@ class ReturnWithArgsInsideGenerator(Message):
         Indicates a return statement with arguments inside a generator.
         """
         message = '\'return\' with argument inside generator'
    +
    +
    +class ReturnOutsideFunction(Message):
    +    """
    +    Indicates a return statement outside of a function/method.
    +    """
    +    message = '\'return\' outside function'
    diff --git a/pymode/libs/pyflakes/reporter.py b/pymode/libs/pyflakes/reporter.py
    new file mode 100644
    index 00000000..ae645bdf
    --- /dev/null
    +++ b/pymode/libs/pyflakes/reporter.py
    @@ -0,0 +1,81 @@
    +"""
    +Provide the Reporter class.
    +"""
    +
    +import re
    +import sys
    +
    +
    +class Reporter(object):
    +    """
    +    Formats the results of pyflakes checks to users.
    +    """
    +
    +    def __init__(self, warningStream, errorStream):
    +        """
    +        Construct a L{Reporter}.
    +
    +        @param warningStream: A file-like object where warnings will be
    +            written to.  The stream's C{write} method must accept unicode.
    +            C{sys.stdout} is a good value.
    +        @param errorStream: A file-like object where error output will be
    +            written to.  The stream's C{write} method must accept unicode.
    +            C{sys.stderr} is a good value.
    +        """
    +        self._stdout = warningStream
    +        self._stderr = errorStream
    +
    +    def unexpectedError(self, filename, msg):
    +        """
    +        An unexpected error occurred trying to process C{filename}.
    +
    +        @param filename: The path to a file that we could not process.
    +        @ptype filename: C{unicode}
    +        @param msg: A message explaining the problem.
    +        @ptype msg: C{unicode}
    +        """
    +        self._stderr.write("%s: %s\n" % (filename, msg))
    +
    +    def syntaxError(self, filename, msg, lineno, offset, text):
    +        """
    +        There was a syntax error in C{filename}.
    +
    +        @param filename: The path to the file with the syntax error.
    +        @ptype filename: C{unicode}
    +        @param msg: An explanation of the syntax error.
    +        @ptype msg: C{unicode}
    +        @param lineno: The line number where the syntax error occurred.
    +        @ptype lineno: C{int}
    +        @param offset: The column on which the syntax error occurred, or None.
    +        @ptype offset: C{int}
    +        @param text: The source code containing the syntax error.
    +        @ptype text: C{unicode}
    +        """
    +        line = text.splitlines()[-1]
    +        if offset is not None:
    +            offset = offset - (len(text) - len(line))
    +            self._stderr.write('%s:%d:%d: %s\n' %
    +                               (filename, lineno, offset + 1, msg))
    +        else:
    +            self._stderr.write('%s:%d: %s\n' % (filename, lineno, msg))
    +        self._stderr.write(line)
    +        self._stderr.write('\n')
    +        if offset is not None:
    +            self._stderr.write(re.sub(r'\S', ' ', line[:offset]) +
    +                               "^\n")
    +
    +    def flake(self, message):
    +        """
    +        pyflakes found something wrong with the code.
    +
    +        @param message: A L{pyflakes.messages.Message}.
    +        """
    +        self._stdout.write(str(message))
    +        self._stdout.write('\n')
    +
    +
    +def _makeDefaultReporter():
    +    """
    +    Make a reporter that can be used when no reporter is specified.
    +    """
    +    return Reporter(sys.stdout, sys.stderr)
    diff --git a/pymode/libs/pylama/__init__.py b/pymode/libs/pylama/__init__.py
    index 1576bfd2..6662db91 100644
    --- a/pymode/libs/pylama/__init__.py
    +++ b/pymode/libs/pylama/__init__.py
    @@ -1,11 +1,11 @@
    -""" Code audit tool for python.
    +"""
    +Code audit tool for python.
     
     :copyright: 2013 by Kirill Klenov.
     :license: BSD, see LICENSE for more details.
    -
     """
     
    -__version__ = "5.0.5"
    +__version__ = "7.0.3"
     __project__ = "pylama"
     __author__ = "Kirill Klenov "
     __license__ = "GNU LGPL"
    diff --git a/pymode/libs/pylama/__main__.py b/pymode/libs/pylama/__main__.py
    new file mode 100644
    index 00000000..64994e75
    --- /dev/null
    +++ b/pymode/libs/pylama/__main__.py
    @@ -0,0 +1,6 @@
    +"""Support the module execution."""
    +
    +from .main import shell
    +
    +if __name__ == '__main__':
    +    shell()
    diff --git a/pymode/libs/pylama/tasks.py b/pymode/libs/pylama/async.py
    similarity index 55%
    rename from pymode/libs/pylama/tasks.py
    rename to pymode/libs/pylama/async.py
    index 69881e1e..12f929fa 100644
    --- a/pymode/libs/pylama/tasks.py
    +++ b/pymode/libs/pylama/async.py
    @@ -1,15 +1,16 @@
    -""" Support for asyncronious code checking. """
    +""" Support for asynchronous checking. """
     
     import logging
     import threading
    -from os import path as op
    +
    +from .core import run
    +
    +
     try:
         import Queue
     except ImportError:
         import queue as Queue
     
    -from .core import run
    -
     
     try:
         import multiprocessing
    @@ -36,41 +37,33 @@ def run(self):
             """ Run tasks from queue. """
             while True:
                 path, params = self.path_queue.get()
    -            errors = check_path(path, **params)
    +            errors = run(path, **params)
                 self.result_queue.put(errors)
                 self.path_queue.task_done()
     
     
    -def async_check_files(paths, options, rootpath=None):
    -    """ Check paths.
    +def check_async(paths, options, rootdir=None):
    +    """ Check given paths asynchronously.
     
         :return list: list of errors
     
         """
    -    errors = []
    -
    -    # Disable async if pylint enabled
    -    async = options.async and 'pylint' not in options.linters
    -
    -    if not async:
    -        for path in paths:
    -            errors += check_path(path, options=options, rootpath=rootpath)
    -        return errors
    -
         LOGGER.info('Async code checking is enabled.')
         path_queue = Queue.Queue()
         result_queue = Queue.Queue()
     
    -    for _ in range(CPU_COUNT):
    +    for num in range(CPU_COUNT):
             worker = Worker(path_queue, result_queue)
             worker.setDaemon(True)
    +        LOGGER.info('Start worker #%s', (num + 1))
             worker.start()
     
         for path in paths:
    -        path_queue.put((path, dict(options=options, rootpath=rootpath)))
    +        path_queue.put((path, dict(options=options, rootdir=rootdir)))
     
         path_queue.join()
     
    +    errors = []
         while True:
             try:
                 errors += result_queue.get(False)
    @@ -80,23 +73,4 @@ def async_check_files(paths, options, rootpath=None):
         return errors
     
     
    -def check_path(path, options=None, rootpath=None, code=None):
    -    """ Check path.
    -
    -    :return list: list of errors
    -
    -    """
    -    LOGGER.info("Parse file: %s", path)
    -
    -    rootpath = rootpath or '.'
    -    errors = []
    -    for error in run(path, code, options):
    -        try:
    -            error._info['rel'] = op.relpath(error.filename, rootpath)
    -            errors.append(error)
    -        except KeyError:
    -            continue
    -
    -    return errors
    -
    -# pylama:ignore=W0212
    +# pylama:ignore=W0212,D210,F0001
    diff --git a/pymode/libs/pylama/config.py b/pymode/libs/pylama/config.py
    index 881e930a..3df38829 100644
    --- a/pymode/libs/pylama/config.py
    +++ b/pymode/libs/pylama/config.py
    @@ -1,8 +1,8 @@
     """ Parse arguments from command line and configuration files. """
     import fnmatch
    -import sys
     import os
    -from re import compile as re
    +import sys
    +import re
     
     import logging
     from argparse import ArgumentParser
    @@ -11,6 +11,17 @@
     from .libs.inirama import Namespace
     from .lint.extensions import LINTERS
     
    +#: A default checkers
    +DEFAULT_LINTERS = 'pep8', 'pyflakes', 'mccabe'
    +
    +CURDIR = os.getcwd()
    +CONFIG_FILES = 'pylama.ini', 'setup.cfg', 'tox.ini', 'pytest.ini'
    +
    +#: The skip pattern
    +SKIP_PATTERN = re.compile(r'# *noqa\b', re.I).search
    +
    +# Parse a modelines
    +MODELINE_RE = re.compile(r'^\s*#\s+(?:pylama:)\s*((?:[\w_]*=[^:\n\s]+:?)+)', re.I | re.M)
     
     # Setup a logger
     LOGGER = logging.getLogger('pylama')
    @@ -18,15 +29,6 @@
     STREAM = logging.StreamHandler(sys.stdout)
     LOGGER.addHandler(STREAM)
     
    -#: A default checkers
    -DEFAULT_LINTERS = 'pep8', 'pyflakes', 'mccabe'
    -
    -CURDIR = os.getcwd()
    -CONFIG_FILES = [
    -    os.path.join(CURDIR, basename) for basename in
    -    ('pylama.ini', 'setup.cfg', 'tox.ini', 'pytest.ini')
    -]
    -
     
     class _Default(object):
     
    @@ -69,8 +71,8 @@ def parse_linters(linters):
     
     PARSER = ArgumentParser(description="Code audit tool for python.")
     PARSER.add_argument(
    -    "path", nargs='?', default=_Default(CURDIR),
    -    help="Path on file or directory for code check.")
    +    "paths", nargs='*', default=_Default([CURDIR]),
    +    help="Paths to files or directories for code check.")
     
     PARSER.add_argument(
         "--verbose", "-v", action='store_true', help="Verbose mode.")
    @@ -86,6 +88,9 @@ def parse_linters(linters):
         "--select", "-s", default=_Default(''), type=split_csp_str,
         help="Select errors and warnings. (comma-separated list)")
     
    +PARSER.add_argument(
    +    "--sort", default=_Default(''), type=split_csp_str,
    +    help="Sort result by error types. Ex. E,W,D")
     
     PARSER.add_argument(
         "--linters", "-l", default=_Default(','.join(DEFAULT_LINTERS)),
    @@ -100,7 +105,7 @@ def parse_linters(linters):
     
     PARSER.add_argument(
         "--skip", default=_Default(''),
    -    type=lambda s: [re(fnmatch.translate(p)) for p in s.split(',') if p],
    +    type=lambda s: [re.compile(fnmatch.translate(p)) for p in s.split(',') if p],
         help="Skip files by masks (comma-separated, Ex. */messages.py)")
     
     PARSER.add_argument("--report", "-r", help="Send report to file [REPORT]")
    @@ -120,11 +125,15 @@ def parse_linters(linters):
         "--force", "-F", action='store_true', default=_Default(False),
         help="Force code checking (if linter doesnt allow)")
     
    +PARSER.add_argument(
    +    "--abspath", "-a", action='store_true', default=_Default(False),
    +    help="Use absolute paths in output.")
    +
     
     ACTIONS = dict((a.dest, a) for a in PARSER._actions)
     
     
    -def parse_options(args=None, config=True, **overrides): # noqa
    +def parse_options(args=None, config=True, rootdir=CURDIR, **overrides): # noqa
         """ Parse options from command line and configuration files.
     
         :return argparse.Namespace:
    @@ -146,11 +155,13 @@ def parse_options(args=None, config=True, **overrides): # noqa
     
         # Compile options from ini
         if config:
    -        cfg = get_config(str(options.options))
    +        cfg = get_config(str(options.options), rootdir=rootdir)
             for k, v in cfg.default.items():
                 LOGGER.info('Find option %s (%s)', k, v)
                 passed_value = getattr(options, k, _Default())
                 if isinstance(passed_value, _Default):
    +                if k == 'paths':
    +                    v = v.split()
                     setattr(options, k, _Default(v))
     
             # Parse file related options
    @@ -168,7 +179,7 @@ def parse_options(args=None, config=True, **overrides): # noqa
                     options.linters_params[name] = dict(opts)
                     continue
     
    -            mask = re(fnmatch.translate(name))
    +            mask = re.compile(fnmatch.translate(name))
                 options.file_params[mask] = dict(opts)
     
         # Postprocess options
    @@ -177,6 +188,10 @@ def parse_options(args=None, config=True, **overrides): # noqa
             if isinstance(value, _Default):
                 setattr(options, name, process_value(name, value.value))
     
    +    if options.async and 'pylint' in options.linters:
    +        LOGGER.warn('Cant parse code asynchronously while pylint is enabled.')
    +        options.async = False
    +
         return options
     
     
    @@ -195,7 +210,7 @@ def process_value(name, value):
         return value
     
     
    -def get_config(ini_path=None):
    +def get_config(ini_path=None, rootdir=CURDIR):
         """ Load configuration from INI.
     
         :return Namespace:
    @@ -206,6 +221,7 @@ def get_config(ini_path=None):
     
         if not ini_path:
             for path in CONFIG_FILES:
    +            path = os.path.join(rootdir, path)
                 if os.path.isfile(path) and os.access(path, os.R_OK):
                     config.read(path)
         else:
    @@ -222,4 +238,4 @@ def setup_logger(options):
             LOGGER.addHandler(logging.FileHandler(options.report, mode='w'))
         LOGGER.info('Try to read configuration from: ' + options.options)
     
    -# pylama:ignore=W0212
    +# pylama:ignore=W0212,D210,F0001
    diff --git a/pymode/libs/pylama/core.py b/pymode/libs/pylama/core.py
    index 1283a662..c0522bf4 100644
    --- a/pymode/libs/pylama/core.py
    +++ b/pymode/libs/pylama/core.py
    @@ -3,33 +3,25 @@
     Prepare params, check a modeline and run the checkers.
     
     """
    -import re
    -
     import logging
    -from collections import defaultdict
     
    -from .config import process_value, LOGGER
    +import os.path as op
    +from .config import process_value, LOGGER, MODELINE_RE, SKIP_PATTERN, CURDIR
    +from .errors import Error, remove_duplicates
     from .lint.extensions import LINTERS
    -from .errors import DUPLICATES, Error
    -
    -
    -#: The skip pattern
    -SKIP_PATTERN = re.compile(r'# *noqa\b', re.I).search
    -
    -# Parse a modelines
    -MODELINE_RE = re.compile(
    -    r'^\s*#\s+(?:pylama:)\s*((?:[\w_]*=[^:\n\s]+:?)+)',
    -    re.I | re.M)
     
     
    -def run(path='', code=None, options=None):
    -    """ Run a code checkers with given params.
    +def run(path='', code=None, rootdir=CURDIR, options=None):
    +    """ Run code checkers with given params.
     
    +    :param path: (str) A file's path.
    +    :param code: (str) A code source
         :return errors: list of dictionaries with error's information
     
         """
         errors = []
         fileconfig = dict()
    +    lname = 'undefined'
         params = dict()
         linters = LINTERS
         linters_params = dict()
    @@ -43,6 +35,7 @@ def run(path='', code=None, options=None):
     
         try:
             with CodeContext(code, path) as ctx:
    +            path = op.relpath(path, rootdir)
                 code = ctx.code
                 params = prepare_params(parse_modeline(code), fileconfig, options)
                 LOGGER.debug('Checking params: %s', params)
    @@ -50,7 +43,7 @@ def run(path='', code=None, options=None):
                 if params.get('skip'):
                     return errors
     
    -            for item in linters:
    +            for item in params.get('linters') or linters:
     
                     if not isinstance(item, tuple):
                         item = (item, LINTERS.get(item))
    @@ -89,7 +82,11 @@ def run(path='', code=None, options=None):
         if code and errors:
             errors = filter_skiplines(code, errors)
     
    -    return sorted(errors, key=lambda e: e.lnum)
    +    key = lambda e: e.lnum
    +    if options and options.sort:
    +        sort = dict((v, n) for n, v in enumerate(options.sort, 1))
    +        key = lambda e: (sort.get(e.type, 999), e.lnum)
    +    return sorted(errors, key=key)
     
     
     def parse_modeline(code):
    @@ -111,13 +108,13 @@ def prepare_params(modeline, fileconfig, options):
         :return dict:
     
         """
    -    params = dict(skip=False, ignore=[], select=[])
    +    params = dict(skip=False, ignore=[], select=[], linters=[])
         if options:
    -        params['ignore'] = options.ignore
    -        params['select'] = options.select
    +        params['ignore'] = list(options.ignore)
    +        params['select'] = list(options.select)
     
         for config in filter(None, [modeline, fileconfig]):
    -        for key in ('ignore', 'select'):
    +        for key in ('ignore', 'select', 'linters'):
                 params[key] += process_value(key, config.get(key, []))
             params['skip'] = bool(int(config.get('skip', False)))
     
    @@ -170,18 +167,6 @@ def filter_skiplines(code, errors):
         return errors
     
     
    -def remove_duplicates(errors):
    -    """ Remove same errors from others linters. """
    -    passed = defaultdict(list)
    -    for error in errors:
    -        key = error.linter, error.number
    -        if key in DUPLICATES:
    -            if key in passed[error.lnum]:
    -                continue
    -            passed[error.lnum] = DUPLICATES[key]
    -        yield error
    -
    -
     class CodeContext(object):
     
         """ Read file if code is None. """
    @@ -193,16 +178,19 @@ def __init__(self, code, path):
             self._file = None
     
         def __enter__(self):
    -        """ Open file and read a code. """
    +        """ Open a file and read it. """
             if self.code is None:
    +            LOGGER.info("File is reading: %s", self.path)
                 self._file = open(self.path, 'rU')
                 self.code = self._file.read()
             return self
     
         def __exit__(self, t, value, traceback):
    -        """ Close opened file. """
    +        """ Close the file which was opened. """
             if self._file is not None:
                 self._file.close()
     
             if t and LOGGER.level == logging.DEBUG:
                 LOGGER.debug(traceback)
    +
    +# pylama:ignore=R0912,D210,F0001
    diff --git a/pymode/libs/pylama/errors.py b/pymode/libs/pylama/errors.py
    index 9e80d2a6..7f6c0a11 100644
    --- a/pymode/libs/pylama/errors.py
    +++ b/pymode/libs/pylama/errors.py
    @@ -1,13 +1,13 @@
    -""" Dont duplicate errors same type. """
    +""" Don't duplicate same errors from different linters. """
    +
    +from collections import defaultdict
    +
     
     DUPLICATES = (
     
         # multiple statements on one line
         [('pep8', 'E701'), ('pylint', 'C0321')],
     
    -    # missing whitespace around operator
    -    [('pep8', 'E225'), ('pylint', 'C0326')],
    -
         # unused variable
         [('pylint', 'W0612'), ('pyflakes', 'W0612')],
     
    @@ -17,15 +17,24 @@
         # unused import
         [('pylint', 'W0611'), ('pyflakes', 'W0611')],
     
    +    # whitespace before ')'
    +    [('pylint', 'C0326'), ('pep8', 'E202')],
    +
    +    # whitespace before '('
    +    [('pylint', 'C0326'), ('pep8', 'E211')],
    +
    +    # multiple spaces after operator
    +    [('pylint', 'C0326'), ('pep8', 'E222')],
    +
    +    # missing whitespace around operator
    +    [('pylint', 'C0326'), ('pep8', 'E225')],
    +
         # unexpected spaces
         [('pylint', 'C0326'), ('pep8', 'E251')],
     
         # long lines
         [('pylint', 'C0301'), ('pep8', 'E501')],
     
    -    # whitespace before '('
    -    [('pylint', 'C0326'), ('pep8', 'E211')],
    -
         # statement ends with a semicolon
         [('pylint', 'W0301'), ('pep8', 'E703')],
     
    @@ -35,14 +44,32 @@
         # bad indentation
         [('pylint', 'W0311'), ('pep8', 'E111')],
     
    +    # wildcard import
    +    [('pylint', 'W00401'), ('pyflakes', 'W0401')],
    +
    +    # module docstring
    +    [('pep257', 'D100'), ('pylint', 'C0111')],
    +
     )
     
     DUPLICATES = dict((key, values) for values in DUPLICATES for key in values)
     
     
    +def remove_duplicates(errors):
    +    """ Filter duplicates from given error's list. """
    +    passed = defaultdict(list)
    +    for error in errors:
    +        key = error.linter, error.number
    +        if key in DUPLICATES:
    +            if key in passed[error.lnum]:
    +                continue
    +            passed[error.lnum] = DUPLICATES[key]
    +        yield error
    +
    +
     class Error(object):
     
    -    """ Store error information. """
    +    """ Store an error's information. """
     
         def __init__(self, linter="", col=1, lnum=1, type="E",
                      text="unknown error", filename="", **kwargs):
    @@ -51,7 +78,7 @@ def __init__(self, linter="", col=1, lnum=1, type="E",
             if linter:
                 text = "%s [%s]" % (text, linter)
             number = text.split(' ', 1)[0]
    -        self._info = dict(linter=linter, col=col, lnum=lnum, type=type,
    +        self._info = dict(linter=linter, col=col, lnum=lnum, type=type[:1],
                               text=text, filename=filename, number=number)
     
         def __getattr__(self, name):
    diff --git a/pymode/libs/pylama/hook.py b/pymode/libs/pylama/hook.py
    index 0dc34069..f65ef46f 100644
    --- a/pymode/libs/pylama/hook.py
    +++ b/pymode/libs/pylama/hook.py
    @@ -6,7 +6,7 @@
     from os import path as op, chmod
     from subprocess import Popen, PIPE
     
    -from .main import LOGGER
    +from .main import LOGGER, process_paths
     from .config import parse_options, setup_logger
     
     
    @@ -30,18 +30,17 @@ def run(command):
     
     def git_hook():
         """ Run pylama after git commit. """
    -    from .main import check_files
    -
         _, files_modified, _ = run("git diff-index --cached --name-only HEAD")
     
         options = parse_options()
         setup_logger(options)
    -    check_files([f for f in map(str, files_modified)], options)
    +    candidates = list(map(str, files_modified))
    +    if candidates:
    +        process_paths(options, candidates=candidates)
     
     
     def hg_hook(ui, repo, node=None, **kwargs):
         """ Run pylama after mercurial commit. """
    -    from .main import check_files
         seen = set()
         paths = []
         if len(repo):
    @@ -55,7 +54,8 @@ def hg_hook(ui, repo, node=None, **kwargs):
     
         options = parse_options()
         setup_logger(options)
    -    check_files(paths, options)
    +    if paths:
    +        process_paths(options, candidates=paths)
     
     
     def install_git(path):
    @@ -79,7 +79,7 @@ def install_hg(path):
             open(hook, 'w+').close()
     
         c = ConfigParser()
    -    c.readfp(open(path, 'r'))
    +    c.readfp(open(hook, 'r'))
         if not c.has_section('hooks'):
             c.add_section('hooks')
     
    @@ -89,7 +89,7 @@ def install_hg(path):
         if not c.has_option('hooks', 'qrefresh'):
             c.set('hooks', 'qrefresh', 'python:pylama.hooks.hg_hook')
     
    -    c.write(open(path, 'w+'))
    +    c.write(open(hook, 'w+'))
     
     
     def install_hook(path):
    @@ -101,11 +101,11 @@ def install_hook(path):
             LOGGER.warn('Git hook has been installed.')
     
         elif op.exists(hg):
    -        install_hg(git)
    +        install_hg(hg)
             LOGGER.warn('Mercurial hook has been installed.')
     
         else:
             LOGGER.error('VCS has not found. Check your path.')
             sys.exit(1)
     
    -# lint_ignore=F0401,E1103
    +# pylama:ignore=F0401,E1103,D210,F0001
    diff --git a/pymode/libs/pylama/lint/__init__.py b/pymode/libs/pylama/lint/__init__.py
    index d5d75901..bd8e8da7 100644
    --- a/pymode/libs/pylama/lint/__init__.py
    +++ b/pymode/libs/pylama/lint/__init__.py
    @@ -1,22 +1,19 @@
    -""" Custom module loader. """
    +"""Custom module loader."""
     
     
    -class Linter(object): # noqa
    +class Linter(object):
     
    -    """ Abstract class for linter plugin. """
    +    """Abstract class for linter plugin."""
     
         @staticmethod
         def allow(path):
    -        """ Check path is relevant for linter.
    +        """Check path is relevant for linter.
     
             :return bool:
    -
             """
    -
             return path.endswith('.py')
     
         @staticmethod
         def run(path, **meta):
    -        """ Method 'run' should be defined. """
    -
    +        """Method 'run' should be defined."""
             raise NotImplementedError(__doc__)
    diff --git a/pymode/libs/pylama/lint/extensions.py b/pymode/libs/pylama/lint/extensions.py
    index 6e0bc3d2..7092fba3 100644
    --- a/pymode/libs/pylama/lint/extensions.py
    +++ b/pymode/libs/pylama/lint/extensions.py
    @@ -1,25 +1,30 @@
    -""" Load extensions. """
    +"""Load extensions."""
     
    -from os import listdir, path as op
    +LINTERS = {}
     
    +try:
    +    from pylama.lint.pylama_mccabe import Linter
    +    LINTERS['mccabe'] = Linter()
    +except ImportError:
    +    pass
     
    -CURDIR = op.dirname(__file__)
    -LINTERS = dict()
    -PREFIX = 'pylama_'
    +try:
    +    from pylama.lint.pylama_pep257 import Linter
    +    LINTERS['pep257'] = Linter()
    +except ImportError:
    +    pass
     
     try:
    -    from importlib import import_module
    +    from pylama.lint.pylama_pep8 import Linter
    +    LINTERS['pep8'] = Linter()
     except ImportError:
    -    from ..libs.importlib import import_module
    +    pass
     
    -for p in listdir(CURDIR):
    -    if p.startswith(PREFIX) and op.isdir(op.join(CURDIR, p)):
    -        name = p[len(PREFIX):]
    -        try:
    -            module = import_module('.lint.%s%s' % (PREFIX, name), 'pylama')
    -            LINTERS[name] = getattr(module, 'Linter')()
    -        except ImportError:
    -            continue
    +try:
    +    from pylama.lint.pylama_pyflakes import Linter
    +    LINTERS['pyflakes'] = Linter()
    +except ImportError:
    +    pass
     
     try:
         from pkg_resources import iter_entry_points
    @@ -29,3 +34,5 @@
                 LINTERS[entry.name] = entry.load()()
     except ImportError:
         pass
    +
    +#  pylama:ignore=E0611
    diff --git a/pymode/libs/pylama/lint/pylama_mccabe.py b/pymode/libs/pylama/lint/pylama_mccabe.py
    new file mode 100644
    index 00000000..fc191004
    --- /dev/null
    +++ b/pymode/libs/pylama/lint/pylama_mccabe.py
    @@ -0,0 +1,29 @@
    +"""Code complexity checking."""
    +from mccabe import McCabeChecker
    +
    +from pylama.lint import Linter as Abstract
    +import ast
    +
    +
    +class Linter(Abstract):
    +
    +    """Run complexity checking."""
    +
    +    @staticmethod
    +    def run(path, code=None, params=None, **meta):
    +        """MCCabe code checking.
    +
    +        :return list: List of errors.
    +        """
    +        try:
    +            tree = compile(code, path, "exec", ast.PyCF_ONLY_AST)
    +        except SyntaxError as exc:
    +            return [{'lnum': exc.lineno, 'text': 'Invalid syntax: %s' % exc.text.strip()}]
    +
    +        McCabeChecker.max_complexity = int(params.get('complexity', 10))
    +        return [
    +            {'lnum': lineno, 'offset': offset, 'text': text, 'type': McCabeChecker._code}
    +            for lineno, offset, text, _ in McCabeChecker(tree, path).run()
    +        ]
    +
    +#  pylama:ignore=W0212
    diff --git a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py b/pymode/libs/pylama/lint/pylama_mccabe/__init__.py
    deleted file mode 100644
    index da8b5f2a..00000000
    --- a/pymode/libs/pylama/lint/pylama_mccabe/__init__.py
    +++ /dev/null
    @@ -1,20 +0,0 @@
    -""" Check complexity. """
    -
    -from .. import Linter as BaseLinter
    -
    -
    -class Linter(BaseLinter):
    -
    -    """ Mccabe code complexity. """
    -
    -    @staticmethod
    -    def run(path, code=None, params=None, **meta):
    -        """ MCCabe code checking.
    -
    -        :return list: List of errors.
    -
    -        """
    -        from .mccabe import get_code_complexity
    -
    -        complexity = int(params.get('complexity', 10))
    -        return get_code_complexity(code, complexity, filename=path) or []
    diff --git a/pymode/libs/pylama/lint/pylama_pep257.py b/pymode/libs/pylama/lint/pylama_pep257.py
    new file mode 100644
    index 00000000..5e1f785c
    --- /dev/null
    +++ b/pymode/libs/pylama/lint/pylama_pep257.py
    @@ -0,0 +1,21 @@
    +"""PEP257 support."""
    +
    +from pep257 import PEP257Checker
    +
    +from pylama.lint import Linter as Abstract
    +
    +
    +class Linter(Abstract):
    +
    +    """Check PEP257 errors."""
    +
    +    @staticmethod
    +    def run(path, code=None, **meta):
    +        """PEP257 code checking.
    +
    +        :return list: List of errors.
    +        """
    +        return [
    +            {'lnum': e.line, 'text': e.message, 'type': 'D'}
    +            for e in PEP257Checker().check_source(code, path)
    +        ]
    diff --git a/pymode/libs/pylama/lint/pylama_pep257/__init__.py b/pymode/libs/pylama/lint/pylama_pep257/__init__.py
    deleted file mode 100644
    index 99474666..00000000
    --- a/pymode/libs/pylama/lint/pylama_pep257/__init__.py
    +++ /dev/null
    @@ -1,26 +0,0 @@
    -""" Check PEP257. """
    -
    -from .. import Linter as BaseLinter
    -
    -
    -class Linter(BaseLinter):
    -
    -    """ Mccabe code complexity. """
    -
    -    @staticmethod
    -    def run(path, code=None, **meta):
    -        """ PEP257 code checking.
    -
    -        :return list: List of errors.
    -
    -        """
    -        from .pep257 import PEP257Checker
    -
    -        errors = []
    -        for er in PEP257Checker().check_source(code, path):
    -            errors.append(dict(
    -                lnum=er.line,
    -                text=er.message,
    -                type='D',
    -            ))
    -        return errors
    diff --git a/pymode/libs/pylama/lint/pylama_pep257/pep257.py b/pymode/libs/pylama/lint/pylama_pep257/pep257.py
    deleted file mode 100644
    index c5df0f72..00000000
    --- a/pymode/libs/pylama/lint/pylama_pep257/pep257.py
    +++ /dev/null
    @@ -1,728 +0,0 @@
    -#! /usr/bin/env python
    -"""Static analysis tool for checking docstring conventions and style.
    -
    -Implemented checks cover PEP257:
    -http://www.python.org/dev/peps/pep-0257/
    -
    -Other checks can be added, e.g. NumPy docstring conventions:
    -https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
    -
    -The repository is located at:
    -http://github.com/GreenSteam/pep257
    -
    -"""
    -from __future__ import with_statement
    -
    -import os
    -import sys
    -import tokenize as tk
    -from itertools import takewhile, dropwhile, chain
    -from optparse import OptionParser
    -from re import compile as re
    -
    -
    -try:
    -    from StringIO import StringIO
    -except ImportError:  # Python 3.0 and later
    -    from io import StringIO
    -
    -
    -try:
    -    next
    -except NameError:  # Python 2.5 and earlier
    -    nothing = object()
    -
    -    def next(obj, default=nothing):
    -        if default == nothing:
    -            return obj.next()
    -        else:
    -            try:
    -                return obj.next()
    -            except StopIteration:
    -                return default
    -
    -
    -__version__ = '0.3.3-alpha'
    -__all__ = ('check', 'collect')
    -
    -
    -humanize = lambda string: re(r'(.)([A-Z]+)').sub(r'\1 \2', string).lower()
    -is_magic = lambda name: name.startswith('__') and name.endswith('__')
    -is_ascii = lambda string: all(ord(char) < 128 for char in string)
    -is_blank = lambda string: not string.strip()
    -leading_space = lambda string: re('\s*').match(string).group()
    -
    -
    -class Value(object):
    -
    -    __init__ = lambda self, *args: vars(self).update(zip(self._fields, args))
    -    __hash__ = lambda self: hash(repr(self))
    -    __eq__ = lambda self, other: other and vars(self) == vars(other)
    -
    -    def __repr__(self):
    -        args = [vars(self)[field] for field in self._fields]
    -        return '%s(%s)' % (self.__class__.__name__, ', '.join(map(repr, args)))
    -
    -
    -class Definition(Value):
    -
    -    _fields = 'name _source start end docstring children parent'.split()
    -
    -    _human = property(lambda self: humanize(type(self).__name__))
    -    kind = property(lambda self: self._human.split()[-1])
    -    module = property(lambda self: self.parent.module)
    -    all = property(lambda self: self.module.all)
    -    _slice = property(lambda self: slice(self.start - 1, self.end))
    -    source = property(lambda self: ''.join(self._source[self._slice]))
    -    __iter__ = lambda self: chain([self], *self.children)
    -
    -    @property
    -    def _publicity(self):
    -        return {True: 'public', False: 'private'}[self.is_public]
    -
    -    def __str__(self):
    -        return 'in %s %s `%s`' % (self._publicity, self._human, self.name)
    -
    -
    -class Module(Definition):
    -
    -    _fields = 'name _source start end docstring children parent _all'.split()
    -    is_public = True
    -    _nest = staticmethod(lambda s: {'def': Function, 'class': Class}[s])
    -    module = property(lambda self: self)
    -    all = property(lambda self: self._all)
    -    __str__ = lambda self: 'at module level'
    -
    -
    -class Function(Definition):
    -
    -    _nest = staticmethod(lambda s: {'def': NestedFunction,
    -                                    'class': NestedClass}[s])
    -
    -    @property
    -    def is_public(self):
    -        if self.all is not None:
    -            return self.name in self.all
    -        else:  # TODO: are there any magic functions? not methods
    -            return not self.name.startswith('_') or is_magic(self.name)
    -
    -
    -class NestedFunction(Function):
    -
    -    is_public = False
    -
    -
    -class Method(Function):
    -
    -    @property
    -    def is_public(self):
    -        name_is_public = not self.name.startswith('_') or is_magic(self.name)
    -        return self.parent.is_public and name_is_public
    -
    -
    -class Class(Definition):
    -
    -    _nest = staticmethod(lambda s: {'def': Method, 'class': NestedClass}[s])
    -    is_public = Function.is_public
    -
    -
    -class NestedClass(Class):
    -
    -    is_public = False
    -
    -
    -class Token(Value):
    -
    -    _fields = 'kind value start end source'.split()
    -
    -
    -class TokenStream(object):
    -
    -    def __init__(self, filelike):
    -        self._generator = tk.generate_tokens(filelike.readline)
    -        self.current = Token(*next(self._generator, None))
    -        self.line = self.current.start[0]
    -
    -    def move(self):
    -        previous = self.current
    -        current = next(self._generator, None)
    -        self.current = None if current is None else Token(*current)
    -        self.line = self.current.start[0] if self.current else self.line
    -        return previous
    -
    -    def __iter__(self):
    -        while True:
    -            if self.current is not None:
    -                yield self.current
    -            else:
    -                return
    -            self.move()
    -
    -
    -class AllError(Exception):
    -
    -    def __init__(self, message):
    -        Exception.__init__(
    -            self, message +
    -            'That means pep257 cannot decide which definitions are public. '
    -            'Variable __all__ should be present at most once in each file, '
    -            "in form `__all__ = ('a_public_function', 'APublicClass', ...)`. "
    -            'More info on __all__: http://stackoverflow.com/q/44834/. ')
    -
    -
    -class Parser(object):
    -
    -    def __call__(self, filelike, filename):
    -        self.source = filelike.readlines()
    -        src = ''.join(self.source)
    -        self.stream = TokenStream(StringIO(src))
    -        self.filename = filename
    -        self.all = None
    -        return self.parse_module()
    -
    -    current = property(lambda self: self.stream.current)
    -    line = property(lambda self: self.stream.line)
    -
    -    def consume(self, kind):
    -        assert self.stream.move().kind == kind
    -
    -    def leapfrog(self, kind):
    -        for token in self.stream:
    -            if token.kind == kind:
    -                self.consume(kind)
    -                return
    -
    -    def parse_docstring(self):
    -        for token in self.stream:
    -            if token.kind in [tk.COMMENT, tk.NEWLINE, tk.NL]:
    -                continue
    -            elif token.kind == tk.STRING:
    -                return token.value
    -            else:
    -                return None
    -
    -    def parse_definitions(self, class_, all=False):
    -        for token in self.stream:
    -            if all and token.value == '__all__':
    -                self.parse_all()
    -            if token.value in ['def', 'class']:
    -                yield self.parse_definition(class_._nest(token.value))
    -            if token.kind == tk.INDENT:
    -                self.consume(tk.INDENT)
    -                for definition in self.parse_definitions(class_):
    -                    yield definition
    -            if token.kind == tk.DEDENT:
    -                return
    -
    -    def parse_all(self):
    -        assert self.current.value == '__all__'
    -        self.consume(tk.NAME)
    -        if self.current.value != '=':
    -            raise AllError('Could not evaluate contents of __all__. ')
    -        self.consume(tk.OP)
    -        if self.current.value not in '([':
    -            raise AllError('Could not evaluate contents of __all__. ')
    -        if self.current.value == '[':
    -            msg = ("%s WARNING: __all__ is defined as a list, this means "
    -                   "pep257 cannot reliably detect contents of the __all__ "
    -                   "variable, because it can be mutated. Change __all__ to be "
    -                   "an (immutable) tuple, to remove this warning. Note, "
    -                   "pep257 uses __all__ to detect which definitions are "
    -                   "public, to warn if public definitions are missing "
    -                   "docstrings. If __all__ is a (mutable) list, pep257 cannot "
    -                   "reliably assume its contents. pep257 will proceed "
    -                   "assuming __all__ is not mutated.\n" % self.filename)
    -            sys.stderr.write(msg)
    -        self.consume(tk.OP)
    -        s = '('
    -        while self.current.kind in (tk.NL, tk.COMMENT):
    -            self.stream.move()
    -        if self.current.kind != tk.STRING:
    -            raise AllError('Could not evaluate contents of __all__. ')
    -        while self.current.value not in ')]':
    -            s += self.current.value
    -            self.stream.move()
    -        s += ')'
    -        try:
    -            self.all = eval(s, {})
    -        except BaseException:
    -            raise AllError('Could not evaluate contents of __all__: %s. ' % s)
    -
    -    def parse_module(self):
    -        start = self.line
    -        docstring = self.parse_docstring()
    -        children = list(self.parse_definitions(Module, all=True))
    -        assert self.current is None
    -        end = self.line
    -        module = Module(self.filename, self.source, start, end,
    -                        docstring, children, None, self.all)
    -        for child in module.children:
    -            child.parent = module
    -        return module
    -
    -    def parse_definition(self, class_):
    -        start = self.line
    -        self.consume(tk.NAME)
    -        name = self.current.value
    -        self.leapfrog(tk.INDENT)
    -        assert self.current.kind != tk.INDENT
    -        docstring = self.parse_docstring()
    -        children = list(self.parse_definitions(class_))
    -        assert self.current.kind == tk.DEDENT
    -        end = self.line - 1
    -        definition = class_(name, self.source, start, end,
    -                            docstring, children, None)
    -        for child in definition.children:
    -            child.parent = definition
    -        return definition
    -
    -
    -class Error(object):
    -
    -    """Error in docstring style."""
    -
    -    # Options that define how errors are printed:
    -    explain = False
    -    source = False
    -
    -    def __init__(self, message=None, final=False):
    -        self.message, self.is_final = message, final
    -        self.definition, self.explanation = [None, None]
    -
    -    code = property(lambda self: self.message.partition(':')[0])
    -    filename = property(lambda self: self.definition.module.name)
    -    line = property(lambda self: self.definition.start)
    -
    -    @property
    -    def lines(self):
    -        source = ''
    -        lines = self.definition._source[self.definition._slice]
    -        offset = self.definition.start
    -        lines_stripped = list(reversed(list(dropwhile(is_blank,
    -                                                      reversed(lines)))))
    -        numbers_width = 0
    -        for n, line in enumerate(lines_stripped):
    -            numbers_width = max(numbers_width, n + offset)
    -        numbers_width = len(str(numbers_width))
    -        numbers_width = 6
    -        for n, line in enumerate(lines_stripped):
    -            source += '%*d: %s' % (numbers_width, n + offset, line)
    -            if n > 5:
    -                source += '        ...\n'
    -                break
    -        return source
    -
    -    def __str__(self):
    -        self.explanation = '\n'.join(l for l in self.explanation.split('\n')
    -                                     if not is_blank(l))
    -        template = '%(filename)s:%(line)s %(definition)s:\n        %(message)s'
    -        if self.source and self.explain:
    -            template += '\n\n%(explanation)s\n\n%(lines)s\n'
    -        elif self.source and not self.explain:
    -            template += '\n\n%(lines)s\n'
    -        elif self.explain and not self.source:
    -            template += '\n\n%(explanation)s\n\n'
    -        return template % dict((name, getattr(self, name)) for name in
    -                               ['filename', 'line', 'definition', 'message',
    -                                'explanation', 'lines'])
    -
    -    __repr__ = __str__
    -
    -    def __lt__(self, other):
    -        return (self.filename, self.line) < (other.filename, other.line)
    -
    -
    -def parse_options():
    -    parser = OptionParser(version=__version__,
    -                          usage='Usage: pep257 [options] [...]')
    -    option = parser.add_option
    -    option('-e', '--explain', action='store_true',
    -           help='show explanation of each error')
    -    option('-s', '--source', action='store_true',
    -           help='show source for each error')
    -    option('--ignore', metavar='', default='',
    -           help='ignore a list comma-separated error codes, '
    -                'for example: --ignore=D101,D202')
    -    option('--match', metavar='', default='(?!test_).*\.py',
    -           help="check only files that exactly match  regular "
    -                "expression; default is --match='(?!test_).*\.py' which "
    -                "matches files that don't start with 'test_' but end with "
    -                "'.py'")
    -    option('--match-dir', metavar='', default='[^\.].*',
    -           help="search only dirs that exactly match  regular "
    -                "expression; default is --match-dir='[^\.].*', which matches "
    -                "all dirs that don't start with a dot")
    -    return parser.parse_args()
    -
    -
    -def collect(names, match=lambda name: True, match_dir=lambda name: True):
    -    """Walk dir trees under `names` and generate filnames that `match`.
    -
    -    Example
    -    -------
    -    >>> sorted(collect(['non-dir.txt', './'],
    -    ...                match=lambda name: name.endswith('.py')))
    -    ['non-dir.txt', './pep257.py', './setup.py', './test_pep257.py']
    -
    -    """
    -    for name in names:  # map(expanduser, names):
    -        if os.path.isdir(name):
    -            for root, dirs, filenames in os.walk(name):
    -                for dir in dirs:
    -                    if not match_dir(dir):
    -                        dirs.remove(dir)  # do not visit those dirs
    -                for filename in filenames:
    -                    if match(filename):
    -                        yield os.path.join(root, filename)
    -        else:
    -            yield name
    -
    -
    -def check(filenames, ignore=()):
    -    """Generate PEP 257 errors that exist in `filenames` iterable.
    -
    -    Skips errors with error-codes defined in `ignore` iterable.
    -
    -    Example
    -    -------
    -    >>> check(['pep257.py'], ignore=['D100'])
    -    
    -
    -    """
    -    for filename in filenames:
    -        try:
    -            with open(filename) as file:
    -                source = file.read()
    -            for error in PEP257Checker().check_source(source, filename):
    -                code = getattr(error, 'code', None)
    -                if code is not None and code not in ignore:
    -                    yield error
    -        except (EnvironmentError, AllError):
    -            yield sys.exc_info()[1]
    -        except tk.TokenError:
    -            yield SyntaxError('invalid syntax in file %s' % filename)
    -
    -
    -def main(options, arguments):
    -    Error.explain = options.explain
    -    Error.source = options.source
    -    collected = collect(arguments or ['.'],
    -                        match=re(options.match + '$').match,
    -                        match_dir=re(options.match_dir + '$').match)
    -    code = 0
    -    for error in check(collected, ignore=options.ignore.split(',')):
    -        sys.stderr.write('%s\n' % error)
    -        code = 1
    -    return code
    -
    -
    -parse = Parser()
    -
    -
    -def check_for(kind, terminal=False):
    -    def decorator(f):
    -        f._check_for = kind
    -        f._terminal = terminal
    -        return f
    -    return decorator
    -
    -
    -class PEP257Checker(object):
    -
    -    """Checker for PEP 257.
    -
    -    D10x: Missing docstrings
    -    D20x: Whitespace issues
    -    D30x: Docstring formatting
    -    D40x: Docstring content issues
    -
    -    """
    -
    -    def check_source(self, source, filename):
    -        module = parse(StringIO(source), filename)
    -        for definition in module:
    -            for check in self.checks:
    -                terminate = False
    -                if isinstance(definition, check._check_for):
    -                    error = check(None, definition, definition.docstring)
    -                    errors = error if hasattr(error, '__iter__') else [error]
    -                    for error in errors:
    -                        if error is not None:
    -                            partition = check.__doc__.partition('.\n')
    -                            message, _, explanation = partition
    -                            if error.message is None:
    -                                error.message = message
    -                            error.explanation = explanation
    -                            error.definition = definition
    -                            yield error
    -                            if check._terminal:
    -                                terminate = True
    -                                break
    -                if terminate:
    -                    break
    -
    -    @property
    -    def checks(self):
    -        all = [check for check in vars(type(self)).values()
    -               if hasattr(check, '_check_for')]
    -        return sorted(all, key=lambda check: not check._terminal)
    -
    -    @check_for(Definition, terminal=True)
    -    def check_docstring_missing(self, definition, docstring):
    -        """D10{0,1,2,3}: Public definitions should have docstrings.
    -
    -        All modules should normally have docstrings.  [...] all functions and
    -        classes exported by a module should also have docstrings. Public
    -        methods (including the __init__ constructor) should also have
    -        docstrings.
    -
    -        Note: Public (exported) definitions are either those with names listed
    -              in __all__ variable (if present), or those that do not start
    -              with a single underscore.
    -
    -        """
    -        if (not docstring and definition.is_public or
    -                docstring and is_blank(eval(docstring))):
    -            codes = {Module: 'D100', Class: 'D101', NestedClass: 'D101',
    -                     Method: 'D102', Function: 'D103', NestedFunction: 'D103'}
    -            return Error('%s: Docstring missing' % codes[type(definition)])
    -
    -    @check_for(Definition)
    -    def check_one_liners(self, definition, docstring):
    -        """D200: One-liner docstrings should fit on one line with quotes.
    -
    -        The closing quotes are on the same line as the opening quotes.
    -        This looks better for one-liners.
    -
    -        """
    -        if docstring:
    -            lines = eval(docstring).split('\n')
    -            if len(lines) > 1:
    -                non_empty_lines = sum(1 for l in lines if not is_blank(l))
    -                if non_empty_lines == 1:
    -                    return Error('D200: One-line docstring should not occupy '
    -                                 '%s lines' % len(lines))
    -
    -    @check_for(Function)
    -    def check_no_blank_before(self, function, docstring):  # def
    -        """D20{1,2}: No blank lines allowed around function/method docstring.
    -
    -        There's no blank line either before or after the docstring.
    -
    -        """
    -        # NOTE: This does not take comments into account.
    -        # NOTE: This does not take into account functions with groups of code.
    -        if docstring:
    -            before, _, after = function.source.partition(docstring)
    -            blanks_before = list(map(is_blank, before.split('\n')[:-1]))
    -            blanks_after = list(map(is_blank, after.split('\n')[1:]))
    -            blanks_before_count = sum(takewhile(bool, reversed(blanks_before)))
    -            blanks_after_count = sum(takewhile(bool, blanks_after))
    -            if blanks_before_count != 0:
    -                yield Error('D201: No blank lines allowed *before* %s '
    -                            'docstring, found %s'
    -                            % (function.kind, blanks_before_count))
    -            if not all(blanks_after) and blanks_after_count != 0:
    -                yield Error('D202: No blank lines allowed *after* %s '
    -                            'docstring, found %s'
    -                            % (function.kind, blanks_after_count))
    -
    -    @check_for(Class)
    -    def check_blank_before_after_class(slef, class_, docstring):
    -        """D20{3,4}: Class docstring should have 1 blank line around them.
    -
    -        Insert a blank line before and after all docstrings (one-line or
    -        multi-line) that document a class -- generally speaking, the class's
    -        methods are separated from each other by a single blank line, and the
    -        docstring needs to be offset from the first method by a blank line;
    -        for symmetry, put a blank line between the class header and the
    -        docstring.
    -
    -        """
    -        # NOTE: this gives flase-positive in this case
    -        # class Foo:
    -        #
    -        #     """Docstring."""
    -        #
    -        #
    -        # # comment here
    -        # def foo(): pass
    -        if docstring:
    -            before, _, after = class_.source.partition(docstring)
    -            blanks_before = list(map(is_blank, before.split('\n')[:-1]))
    -            blanks_after = list(map(is_blank, after.split('\n')[1:]))
    -            blanks_before_count = sum(takewhile(bool, reversed(blanks_before)))
    -            blanks_after_count = sum(takewhile(bool, blanks_after))
    -            if blanks_before_count != 1:
    -                yield Error('D203: Expected 1 blank line *before* class '
    -                            'docstring, found %s' % blanks_before_count)
    -            if not all(blanks_after) and blanks_after_count != 1:
    -                yield Error('D204: Expected 1 blank line *after* class '
    -                            'docstring, found %s' % blanks_after_count)
    -
    -    @check_for(Definition)
    -    def check_blank_after_summary(self, definition, docstring):
    -        """D205: Blank line missing between one-line summary and description.
    -
    -        Multi-line docstrings consist of a summary line just like a one-line
    -        docstring, followed by a blank line, followed by a more elaborate
    -        description. The summary line may be used by automatic indexing tools;
    -        it is important that it fits on one line and is separated from the
    -        rest of the docstring by a blank line.
    -
    -        """
    -        if docstring:
    -            lines = eval(docstring).strip().split('\n')
    -            if len(lines) > 1 and not is_blank(lines[1]):
    -                return Error()
    -
    -    @check_for(Definition)
    -    def check_indent(self, definition, docstring):
    -        """D20{6,7,8}: The entire docstring should be indented same as code.
    -
    -        The entire docstring is indented the same as the quotes at its
    -        first line.
    -
    -        """
    -        if docstring:
    -            before_docstring, _, _ = definition.source.partition(docstring)
    -            _, _, indent = before_docstring.rpartition('\n')
    -            lines = docstring.split('\n')
    -            if len(lines) > 1:
    -                lines = lines[1:]  # First line does not need indent.
    -                indents = [leading_space(l) for l in lines if not is_blank(l)]
    -                if set(' \t') == set(''.join(indents) + indent):
    -                    return Error('D206: Docstring indented with both tabs and '
    -                                 'spaces')
    -                if (len(indents) > 1 and min(indents[:-1]) > indent
    -                        or indents[-1] > indent):
    -                    return Error('D208: Docstring is over-indented')
    -                if min(indents) < indent:
    -                    return Error('D207: Docstring is under-indented')
    -
    -    @check_for(Definition)
    -    def check_newline_after_last_paragraph(self, definition, docstring):
    -        """D209: Put multi-line docstring closing quotes on separate line.
    -
    -        Unless the entire docstring fits on a line, place the closing
    -        quotes on a line by themselves.
    -
    -        """
    -        if docstring:
    -            lines = [l for l in eval(docstring).split('\n') if not is_blank(l)]
    -            if len(lines) > 1:
    -                if docstring.split("\n")[-1].strip() not in ['"""', "'''"]:
    -                    return Error('D209: Put multi-line docstring closing '
    -                                 'quotes on separate line')
    -
    -    @check_for(Definition)
    -    def check_triple_double_quotes(self, definition, docstring):
    -        r'''D300: Use """triple double quotes""".
    -
    -        For consistency, always use """triple double quotes""" around
    -        docstrings. Use r"""raw triple double quotes""" if you use any
    -        backslashes in your docstrings. For Unicode docstrings, use
    -        u"""Unicode triple-quoted strings""".
    -
    -        Note: Exception to this is made if the docstring contains
    -              """ quotes in its body.
    -
    -        '''
    -        if docstring and '"""' in eval(docstring) and docstring.startswith(
    -                ("'''", "r'''", "u'''")):
    -            # Allow ''' quotes if docstring contains """, because otherwise """
    -            # quotes could not be expressed inside docstring.  Not in PEP 257.
    -            return
    -        if docstring and not docstring.startswith(('"""', 'r"""', 'u"""')):
    -            quotes = "'''" if "'''" in docstring[:4] else "'"
    -            return Error('D300: Expected """-quotes, got %s-quotes' % quotes)
    -
    -    @check_for(Definition)
    -    def check_backslashes(self, definition, docstring):
    -        r'''D301: Use r""" if any backslashes in a docstring.
    -
    -        Use r"""raw triple double quotes""" if you use any backslashes
    -        (\) in your docstrings.
    -
    -        '''
    -        # Just check that docstring is raw, check_triple_double_quotes
    -        # ensures the correct quotes.
    -        if docstring and '\\' in docstring and not docstring.startswith('r'):
    -            return Error()
    -
    -    @check_for(Definition)
    -    def check_unicode_docstring(self, definition, docstring):
    -        r'''D302: Use u""" for docstrings with Unicode.
    -
    -        For Unicode docstrings, use u"""Unicode triple-quoted strings""".
    -
    -        '''
    -        # Just check that docstring is unicode, check_triple_double_quotes
    -        # ensures the correct quotes.
    -        if docstring and sys.version_info[0] <= 2:
    -            if not is_ascii(docstring) and not docstring.startswith('u'):
    -                return Error()
    -
    -    @check_for(Definition)
    -    def check_ends_with_period(self, definition, docstring):
    -        """D400: First line should end with a period.
    -
    -        The [first line of a] docstring is a phrase ending in a period.
    -
    -        """
    -        if docstring:
    -            summary_line = eval(docstring).strip().split('\n')[0]
    -            if not summary_line.endswith('.'):
    -                return Error("D400: First line should end with '.', not %r"
    -                             % summary_line[-1])
    -
    -    @check_for(Function)
    -    def check_imperative_mood(self, function, docstring):  # def context
    -        """D401: First line should be in imperative mood: 'Do', not 'Does'.
    -
    -        [Docstring] prescribes the function or method's effect as a command:
    -        ("Do this", "Return that"), not as a description; e.g. don't write
    -        "Returns the pathname ...".
    -
    -        """
    -        if docstring:
    -            stripped = eval(docstring).strip()
    -            if stripped:
    -                first_word = stripped.split()[0]
    -                if first_word.endswith('s') and not first_word.endswith('ss'):
    -                    return Error('D401: First line should be imperative: '
    -                                 '%r, not %r' % (first_word[:-1], first_word))
    -
    -    @check_for(Function)
    -    def check_no_signature(self, function, docstring):  # def context
    -        """D402: First line should not be function's or method's "signature".
    -
    -        The one-line docstring should NOT be a "signature" reiterating the
    -        function/method parameters (which can be obtained by introspection).
    -
    -        """
    -        if docstring:
    -            first_line = eval(docstring).strip().split('\n')[0]
    -            if function.name + '(' in first_line.replace(' ', ''):
    -                return Error("D402: First line should not be %s's signature"
    -                             % function.kind)
    -
    -    # Somewhat hard to determine if return value is mentioned.
    -    # @check(Function)
    -    def SKIP_check_return_type(self, function, docstring):
    -        """D40x: Return value type should be mentioned.
    -
    -        [T]he nature of the return value cannot be determined by
    -        introspection, so it should be mentioned.
    -
    -        """
    -        if docstring and function.returns_value:
    -            if 'return' not in docstring.lower():
    -                return Error()
    -
    -
    -if __name__ == '__main__':
    -    try:
    -        sys.exit(main(*parse_options()))
    -    except KeyboardInterrupt:
    -        pass
    diff --git a/pymode/libs/pylama/lint/pylama_pep8/__init__.py b/pymode/libs/pylama/lint/pylama_pep8.py
    similarity index 65%
    rename from pymode/libs/pylama/lint/pylama_pep8/__init__.py
    rename to pymode/libs/pylama/lint/pylama_pep8.py
    index a0a4ecb7..30329d80 100644
    --- a/pymode/libs/pylama/lint/pylama_pep8/__init__.py
    +++ b/pymode/libs/pylama/lint/pylama_pep8.py
    @@ -1,6 +1,8 @@
    -""" Check PEP8. """
    -from .. import Linter as BaseLinter
    -from .pep8 import BaseReport, StyleGuide
    +"""PEP8 support."""
    +from pep8 import BaseReport, StyleGuide, get_parser
    +
    +from pylama.lint import Linter as Abstract
    +
     
     try:
         from StringIO import StringIO
    @@ -8,17 +10,23 @@
         from io import StringIO
     
     
    -class Linter(BaseLinter):
    +class Linter(Abstract):
     
    -    """ PEP8 code check. """
    +    """PEP8 runner."""
     
         @staticmethod
         def run(path, code=None, params=None, **meta):
    -        """ PEP8 code checking.
    +        """Check code with PEP8.
     
             :return list: List of errors.
    -
             """
    +        parser = get_parser()
    +        for option in parser.option_list:
    +            if option.dest and option.dest in params:
    +                value = params[option.dest]
    +                if not isinstance(value, str):
    +                    continue
    +                params[option.dest] = option.convert_value(option, params[option.dest])
             P8Style = StyleGuide(reporter=_PEP8Report, **params)
             buf = StringIO(code)
             return P8Style.input_file(path, lines=buf.readlines())
    @@ -31,13 +39,13 @@ def __init__(self, *args, **kwargs):
             self.errors = []
     
         def init_file(self, filename, lines, expected, line_offset):
    -        """ Prepare storage for errors. """
    +        """Prepare storage for errors."""
             super(_PEP8Report, self).init_file(
                 filename, lines, expected, line_offset)
             self.errors = []
     
         def error(self, line_number, offset, text, check):
    -        """ Save errors. """
    +        """Save errors."""
             code = super(_PEP8Report, self).error(
                 line_number, offset, text, check)
     
    @@ -50,7 +58,7 @@ def error(self, line_number, offset, text, check):
                 ))
     
         def get_file_results(self):
    -        """ Get errors.
    +        """Get errors.
     
             :return list: List of errors.
     
    diff --git a/pymode/libs/pylama/lint/pylama_pyflakes.py b/pymode/libs/pylama/lint/pylama_pyflakes.py
    new file mode 100644
    index 00000000..184d969f
    --- /dev/null
    +++ b/pymode/libs/pylama/lint/pylama_pyflakes.py
    @@ -0,0 +1,49 @@
    +"""Pyflakes support."""
    +
    +from pyflakes import checker
    +
    +from pylama.lint import Linter as Abstract
    +
    +
    +checker.messages.UnusedImport.message = "W0611 %r imported but unused"
    +checker.messages.RedefinedWhileUnused.message = "W0404 redefinition of unused %r from line %r"
    +checker.messages.RedefinedInListComp.message = "W0621 list comprehension redefines %r from line %r"
    +checker.messages.ImportShadowedByLoopVar.message = "W0621 import %r from line %r shadowed by loop variable"
    +checker.messages.ImportStarUsed.message = "W0401 'from %s import *' used; unable to detect undefined names"
    +checker.messages.UndefinedName.message = "E0602 undefined name %r"
    +checker.messages.DoctestSyntaxError.message = "W0511 syntax error in doctest"
    +checker.messages.UndefinedExport.message = "E0603 undefined name %r in __all__"
    +checker.messages.UndefinedLocal.message = "E0602 local variable %r (defined in enclosing scope on line %r) referenced before assignment"
    +checker.messages.DuplicateArgument.message = "E1122 duplicate argument %r in function definition"
    +checker.messages.LateFutureImport.message = "W0410 future import(s) %r after other statements"
    +checker.messages.UnusedVariable.message = "W0612 local variable %r is assigned to but never used"
    +checker.messages.ReturnWithArgsInsideGenerator.message = "E0106 'return' with argument inside generator"
    +checker.messages.ReturnOutsideFunction.message = "E0104 'return' outside function"
    +
    +
    +class Linter(Abstract):
    +
    +    """Pyflakes runner."""
    +
    +    @staticmethod
    +    def run(path, code=None, params=None, **meta):
    +        """Check code with pyflakes.
    +
    +        :return list: List of errors.
    +        """
    +        import _ast
    +
    +        builtins = params.get("builtins", "")
    +
    +        if builtins:
    +            builtins = builtins.split(",")
    +
    +        tree = compile(code, path, "exec", _ast.PyCF_ONLY_AST)
    +        w = checker.Checker(tree, path, builtins=builtins)
    +        w.messages = sorted(w.messages, key=lambda m: m.lineno)
    +        return [
    +            {'lnum': m.lineno, 'text': m.message % m.message_args}
    +            for m in sorted(w.messages, key=lambda m: m.lineno)
    +        ]
    +
    +#  pylama:ignore=E501,C0301
    diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py
    deleted file mode 100644
    index 72fc26fe..00000000
    --- a/pymode/libs/pylama/lint/pylama_pyflakes/__init__.py
    +++ /dev/null
    @@ -1,65 +0,0 @@
    -""" Check Pyflakes. """
    -import sys
    -from os import path as op
    -
    -from .. import Linter as BaseLinter
    -
    -
    -# Use local version of pyflakes
    -path = op.dirname(op.abspath(__file__))
    -sys.path.insert(0, path)
    -
    -from pyflakes import checker
    -
    -
    -class Linter(BaseLinter):
    -
    -    """ Pyflakes code check. """
    -
    -    def __init__(self):
    -        if checker.messages.UndefinedName.message != "E0602 undefined name %r":
    -            monkey_patch_messages(checker.messages)
    -
    -    @staticmethod
    -    def run(path, code=None, params=None, **meta):
    -        """ Pyflake code checking.
    -
    -        :return list: List of errors.
    -
    -        """
    -        import _ast
    -
    -        builtins = params.get("builtins", "")
    -
    -        if builtins:
    -            builtins = builtins.split(",")
    -
    -        errors = []
    -        tree = compile(code, path, "exec", _ast.PyCF_ONLY_AST)
    -        w = checker.Checker(tree, path, builtins=builtins)
    -        w.messages = sorted(w.messages, key=lambda m: m.lineno)
    -        for w in w.messages:
    -            errors.append(dict(
    -                lnum=w.lineno,
    -                text=w.message % w.message_args,
    -            ))
    -        return errors
    -
    -
    -def monkey_patch_messages(messages):
    -    """ Patch pyflakes messages. """
    -
    -    messages.LateFutureImport.message = "W0410 future import(s) %r after other statements"
    -    messages.ImportStarUsed.message = "W0401 'from %s import *' used; unable to detect undefined names"
    -    messages.RedefinedWhileUnused.message = "W0404 redefinition of unused %r from line %r"
    -    messages.DoctestSyntaxError.message = "W0511 syntax error in doctest"
    -    messages.UnusedImport.message = "W0611 %r imported but unused"
    -    messages.UnusedVariable.message = "W0612 local variable %r is assigned to but never used"
    -    messages.RedefinedInListComp.message = "W0621 list comprehension redefines %r from line %r"
    -    messages.Redefined.message = "W0621 redefinition of %r from line %r"
    -    messages.ImportShadowedByLoopVar.message = "W0621 import %r from line %r shadowed by loop variable"
    -    messages.ReturnWithArgsInsideGenerator.message = "E0106 'return' with argument inside generator"
    -    messages.UndefinedName.message = "E0602 undefined name %r"
    -    messages.UndefinedLocal.message = "E0602 local variable %r (defined in enclosing scope on line %r) referenced before assignment"
    -    messages.UndefinedExport.message = "E0603 undefined name %r in __all__"
    -    messages.DuplicateArgument.message = "E1122 duplicate argument %r in function definition"
    diff --git a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py b/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py
    deleted file mode 100644
    index cb2b136b..00000000
    --- a/pymode/libs/pylama/lint/pylama_pyflakes/pyflakes/__init__.py
    +++ /dev/null
    @@ -1,2 +0,0 @@
    -
    -__version__ = '0.8.2a0'
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/__init__.py b/pymode/libs/pylama/lint/pylama_pylint/__init__.py
    index 6ec4f3ba..74e6bc22 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/__init__.py
    +++ b/pymode/libs/pylama/lint/pylama_pylint/__init__.py
    @@ -4,14 +4,9 @@
     # ==================
     
     
    -__version__ = "0.3.1"
    +__version__ = "2.1.1"
     __project__ = "pylama_pylint"
     __author__ = "horneds "
     __license__ = "BSD"
     
    -import sys
    -if sys.version_info >= (3, 0, 0):
    -    raise ImportError("pylama_pylint doesnt support python3")
    -
    -from .main import Linter
    -assert Linter
    +from .main import Linter  # noqa
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2qt4.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2qt4.py
    deleted file mode 100644
    index 0ee0410e..00000000
    --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2qt4.py
    +++ /dev/null
    @@ -1,25 +0,0 @@
    -"""Astroid hooks for the Python 2 qt4 module.
    -
    -Currently help understanding of :
    -
    -* PyQT4.QtCore
    -"""
    -
    -from astroid import MANAGER
    -from astroid.builder import AstroidBuilder
    -
    -
    -def pyqt4_qtcore_transform(module):
    -    fake = AstroidBuilder(MANAGER).string_build('''
    -
    -def SIGNAL(signal_name): pass
    -
    -class QObject(object):
    -    def emit(self, signal): pass
    -''')
    -    for klass in ('QObject',):
    -        module.locals[klass] = fake.locals[klass]
    -
    -
    -import py2stdlib
    -py2stdlib.MODULE_TRANSFORMS['PyQt4.QtCore'] = pyqt4_qtcore_transform
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2stdlib.py b/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2stdlib.py
    deleted file mode 100644
    index 6b0ef501..00000000
    --- a/pymode/libs/pylama/lint/pylama_pylint/astroid/brain/py2stdlib.py
    +++ /dev/null
    @@ -1,252 +0,0 @@
    -"""Astroid hooks for the Python 2 standard library.
    -
    -Currently help understanding of :
    -
    -* hashlib.md5 and hashlib.sha1
    -"""
    -
    -import sys
    -
    -from astroid import MANAGER, AsStringRegexpPredicate, UseInferenceDefault, inference_tip, YES
    -from astroid import exceptions
    -from astroid import nodes
    -from astroid.builder import AstroidBuilder
    -
    -MODULE_TRANSFORMS = {}
    -PY3K = sys.version_info > (3, 0)
    -
    -
    -# module specific transformation functions #####################################
    -
    -def transform(module):
    -    try:
    -        tr = MODULE_TRANSFORMS[module.name]
    -    except KeyError:
    -        pass
    -    else:
    -        tr(module)
    -MANAGER.register_transform(nodes.Module, transform)
    -
    -# module specific transformation functions #####################################
    -
    -def hashlib_transform(module):
    -    template = '''
    -
    -class %s(object):
    -  def __init__(self, value=''): pass
    -  def digest(self):
    -    return u''
    -  def update(self, value): pass
    -  def hexdigest(self):
    -    return u''
    -'''
    -
    -    algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
    -    classes = "".join(template % hashfunc for hashfunc in algorithms)
    -
    -    fake = AstroidBuilder(MANAGER).string_build(classes)
    -
    -    for hashfunc in algorithms:
    -        module.locals[hashfunc] = fake.locals[hashfunc]
    -
    -def collections_transform(module):
    -    fake = AstroidBuilder(MANAGER).string_build('''
    -
    -class defaultdict(dict):
    -    default_factory = None
    -    def __missing__(self, key): pass
    -
    -class deque(object):
    -    maxlen = 0
    -    def __init__(iterable=None, maxlen=None): pass
    -    def append(self, x): pass
    -    def appendleft(self, x): pass
    -    def clear(self): pass
    -    def count(self, x): return 0
    -    def extend(self, iterable): pass
    -    def extendleft(self, iterable): pass
    -    def pop(self): pass
    -    def popleft(self): pass
    -    def remove(self, value): pass
    -    def reverse(self): pass
    -    def rotate(self, n): pass
    -    def __iter__(self): return self
    -
    -''')
    -
    -    for klass in ('deque', 'defaultdict'):
    -        module.locals[klass] = fake.locals[klass]
    -
    -def pkg_resources_transform(module):
    -    fake = AstroidBuilder(MANAGER).string_build('''
    -
    -def resource_exists(package_or_requirement, resource_name):
    -    pass
    -
    -def resource_isdir(package_or_requirement, resource_name):
    -    pass
    -
    -def resource_filename(package_or_requirement, resource_name):
    -    pass
    -
    -def resource_stream(package_or_requirement, resource_name):
    -    pass
    -
    -def resource_string(package_or_requirement, resource_name):
    -    pass
    -
    -def resource_listdir(package_or_requirement, resource_name):
    -    pass
    -
    -def extraction_error():
    -    pass
    -
    -def get_cache_path(archive_name, names=()):
    -    pass
    -
    -def postprocess(tempname, filename):
    -    pass
    -
    -def set_extraction_path(path):
    -    pass
    -
    -def cleanup_resources(force=False):
    -    pass
    -
    -''')
    -
    -    for func_name, func in fake.locals.items():
    -        module.locals[func_name] = func
    -
    -
    -def urlparse_transform(module):
    -    fake = AstroidBuilder(MANAGER).string_build('''
    -
    -def urlparse(url, scheme='', allow_fragments=True):
    -    return ParseResult()
    -
    -class ParseResult(object):
    -    def __init__(self):
    -        self.scheme = ''
    -        self.netloc = ''
    -        self.path = ''
    -        self.params = ''
    -        self.query = ''
    -        self.fragment = ''
    -        self.username = None
    -        self.password = None
    -        self.hostname = None
    -        self.port = None
    -
    -    def geturl(self):
    -        return ''
    -''')
    -
    -    for func_name, func in fake.locals.items():
    -        module.locals[func_name] = func
    -
    -def subprocess_transform(module):
    -    if PY3K:
    -        communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
    -    else:
    -        communicate = ('string', 'string')
    -    fake = AstroidBuilder(MANAGER).string_build('''
    -
    -class Popen(object):
    -    returncode = pid = 0
    -    stdin = stdout = stderr = file()
    -
    -    def __init__(self, args, bufsize=0, executable=None,
    -                 stdin=None, stdout=None, stderr=None,
    -                 preexec_fn=None, close_fds=False, shell=False,
    -                 cwd=None, env=None, universal_newlines=False,
    -                 startupinfo=None, creationflags=0):
    -        pass
    -
    -    def communicate(self, input=None):
    -        return %r
    -    def wait(self):
    -        return self.returncode
    -    def poll(self):
    -        return self.returncode
    -    def send_signal(self, signal):
    -        pass
    -    def terminate(self):
    -        pass
    -    def kill(self):
    -        pass
    -   ''' % (communicate, ))
    -
    -    for func_name, func in fake.locals.items():
    -        module.locals[func_name] = func
    -
    -
    -
    -MODULE_TRANSFORMS['hashlib'] = hashlib_transform
    -MODULE_TRANSFORMS['collections'] = collections_transform
    -MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
    -MODULE_TRANSFORMS['urlparse'] = urlparse_transform
    -MODULE_TRANSFORMS['subprocess'] = subprocess_transform
    -
    -# namedtuple support ###########################################################
    -
    -def infer_named_tuple(node, context=None):
    -    """Specific inference function for namedtuple CallFunc node"""
    -    def infer_first(node):
    -        try:
    -            value = node.infer().next()
    -            if value is YES:
    -                raise UseInferenceDefault()
    -            else:
    -                return value
    -        except StopIteration:
    -            raise InferenceError()
    -
    -    # node is a CallFunc node, class name as first argument and generated class
    -    # attributes as second argument
    -    if len(node.args) != 2:
    -        # something weird here, go back to class implementation
    -        raise UseInferenceDefault()
    -    # namedtuple list of attributes can be a list of strings or a
    -    # whitespace-separate string
    -    try:
    -        name = infer_first(node.args[0]).value
    -        names = infer_first(node.args[1])
    -        try:
    -            attributes = names.value.split()
    -        except AttributeError:
    -            attributes = [infer_first(const).value for const in names.elts]
    -    except (AttributeError, exceptions.InferenceError):
    -        raise UseInferenceDefault()
    -    # we want to return a Class node instance with proper attributes set
    -    class_node = nodes.Class(name, 'docstring')
    -    class_node.parent = node.parent
    -    # set base class=tuple
    -    class_node.bases.append(nodes.Tuple._proxied)
    -    # XXX add __init__(*attributes) method
    -    for attr in attributes:
    -        fake_node = nodes.EmptyNode()
    -        fake_node.parent = class_node
    -        class_node.instance_attrs[attr] = [fake_node]
    -
    -    fake = AstroidBuilder(MANAGER).string_build('''
    -class %(name)s(tuple):
    -    def _asdict(self):
    -        return self.__dict__
    -    @classmethod
    -    def _make(cls, iterable, new=tuple.__new__, len=len):
    -        return new(cls, iterable)
    -    def _replace(_self, **kwds):
    -        result = _self._make(map(kwds.pop, %(fields)r, _self))
    -        if kwds:
    -            raise ValueError('Got unexpected field names: %%r' %% list(kwds))
    -        return result
    -    ''' % {'name': name, 'fields': attributes})
    -    class_node.locals['_asdict'] = fake.body[0].locals['_asdict']
    -    class_node.locals['_make'] = fake.body[0].locals['_make']
    -    class_node.locals['_replace'] = fake.body[0].locals['_replace']
    -    # we use UseInferenceDefault, we can't be a generator so return an iterator
    -    return iter([class_node])
    -
    -MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
    -                           AsStringRegexpPredicate('namedtuple', 'func'))
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py
    deleted file mode 100644
    index d3be5552..00000000
    --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/__pkginfo__.py
    +++ /dev/null
    @@ -1,53 +0,0 @@
    -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
    -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
    -#
    -# This file is part of logilab-common.
    -#
    -# logilab-common is free software: you can redistribute it and/or modify it under
    -# the terms of the GNU Lesser General Public License as published by the Free
    -# Software Foundation, either version 2.1 of the License, or (at your option) any
    -# later version.
    -#
    -# logilab-common is distributed in the hope that it will be useful, but WITHOUT
    -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    -# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
    -# details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License along
    -# with logilab-common.  If not, see .
    -"""logilab.common packaging information"""
    -__docformat__ = "restructuredtext en"
    -import sys
    -import os
    -
    -distname = 'logilab-common'
    -modname = 'common'
    -subpackage_of = 'logilab'
    -subpackage_master = True
    -
    -numversion = (0, 61, 0)
    -version = '.'.join([str(num) for num in numversion])
    -
    -license = 'LGPL' # 2.1 or later
    -description = "collection of low-level Python packages and modules used by Logilab projects"
    -web = "http://www.logilab.org/project/%s" % distname
    -mailinglist = "mailto://python-projects@lists.logilab.org"
    -author = "Logilab"
    -author_email = "contact@logilab.fr"
    -
    -
    -from os.path import join
    -scripts = [join('bin', 'pytest')]
    -include_dirs = [join('test', 'data')]
    -
    -install_requires = []
    -if sys.version_info < (2, 7):
    -    install_requires.append('unittest2 >= 0.5.1')
    -if os.name == 'nt':
    -    install_requires.append('colorama')
    -
    -classifiers = ["Topic :: Utilities",
    -               "Programming Language :: Python",
    -               "Programming Language :: Python :: 2",
    -               "Programming Language :: Python :: 3",
    -               ]
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/compat.py b/pymode/libs/pylama/lint/pylama_pylint/logilab/common/compat.py
    deleted file mode 100644
    index 8983ece9..00000000
    --- a/pymode/libs/pylama/lint/pylama_pylint/logilab/common/compat.py
    +++ /dev/null
    @@ -1,243 +0,0 @@
    -# pylint: disable=E0601,W0622,W0611
    -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
    -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
    -#
    -# This file is part of logilab-common.
    -#
    -# logilab-common is free software: you can redistribute it and/or modify it under
    -# the terms of the GNU Lesser General Public License as published by the Free
    -# Software Foundation, either version 2.1 of the License, or (at your option) any
    -# later version.
    -#
    -# logilab-common is distributed in the hope that it will be useful, but WITHOUT
    -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    -# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
    -# details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License along
    -# with logilab-common.  If not, see .
    -"""Wrappers around some builtins introduced in python 2.3, 2.4 and
    -2.5, making them available in for earlier versions of python.
    -
    -See another compatibility snippets from other projects:
    -
    -    :mod:`lib2to3.fixes`
    -    :mod:`coverage.backward`
    -    :mod:`unittest2.compatibility`
    -"""
    -
    -from __future__ import generators
    -
    -__docformat__ = "restructuredtext en"
    -
    -import os
    -import sys
    -import types
    -from warnings import warn
    -
    -import __builtin__ as builtins # 2to3 will tranform '__builtin__' to 'builtins'
    -
    -if sys.version_info < (3, 0):
    -    str_to_bytes = str
    -    def str_encode(string, encoding):
    -        if isinstance(string, unicode):
    -            return string.encode(encoding)
    -        return str(string)
    -else:
    -    def str_to_bytes(string):
    -        return str.encode(string)
    -    # we have to ignore the encoding in py3k to be able to write a string into a
    -    # TextIOWrapper or like object (which expect an unicode string)
    -    def str_encode(string, encoding):
    -        return str(string)
    -
    -# XXX callable built-in seems back in all python versions
    -try:
    -    callable = builtins.callable
    -except AttributeError:
    -    from collections import Callable
    -    def callable(something):
    -        return isinstance(something, Callable)
    -    del Callable
    -
    -# See also http://bugs.python.org/issue11776
    -if sys.version_info[0] == 3:
    -    def method_type(callable, instance, klass):
    -        # api change. klass is no more considered
    -        return types.MethodType(callable, instance)
    -else:
    -    # alias types otherwise
    -    method_type = types.MethodType
    -
    -if sys.version_info < (3, 0):
    -    raw_input = raw_input
    -else:
    -    raw_input = input
    -
    -# Pythons 2 and 3 differ on where to get StringIO
    -if sys.version_info < (3, 0):
    -    from cStringIO import StringIO
    -    FileIO = file
    -    BytesIO = StringIO
    -    reload = reload
    -else:
    -    from io import FileIO, BytesIO, StringIO
    -    from imp import reload
    -
    -# Where do pickles come from?
    -try:
    -    import cPickle as pickle
    -except ImportError:
    -    import pickle
    -
    -from logilab.common.deprecation import deprecated
    -
    -from itertools import izip, chain, imap
    -if sys.version_info < (3, 0):# 2to3 will remove the imports
    -    izip = deprecated('izip exists in itertools since py2.3')(izip)
    -    imap = deprecated('imap exists in itertools since py2.3')(imap)
    -chain = deprecated('chain exists in itertools since py2.3')(chain)
    -
    -sum = deprecated('sum exists in builtins since py2.3')(sum)
    -enumerate = deprecated('enumerate exists in builtins since py2.3')(enumerate)
    -frozenset = deprecated('frozenset exists in builtins since py2.4')(frozenset)
    -reversed = deprecated('reversed exists in builtins since py2.4')(reversed)
    -sorted = deprecated('sorted exists in builtins since py2.4')(sorted)
    -max = deprecated('max exists in builtins since py2.4')(max)
    -
    -
    -# Python2.5 builtins
    -try:
    -    any = any
    -    all = all
    -except NameError:
    -    def any(iterable):
    -        """any(iterable) -> bool
    -
    -        Return True if bool(x) is True for any x in the iterable.
    -        """
    -        for elt in iterable:
    -            if elt:
    -                return True
    -        return False
    -
    -    def all(iterable):
    -        """all(iterable) -> bool
    -
    -        Return True if bool(x) is True for all values x in the iterable.
    -        """
    -        for elt in iterable:
    -            if not elt:
    -                return False
    -        return True
    -
    -
    -# Python2.5 subprocess added functions and exceptions
    -try:
    -    from subprocess import Popen
    -except ImportError:
    -    # gae or python < 2.3
    -
    -    class CalledProcessError(Exception):
    -        """This exception is raised when a process run by check_call() returns
    -        a non-zero exit status.  The exit status will be stored in the
    -        returncode attribute."""
    -        def __init__(self, returncode, cmd):
    -            self.returncode = returncode
    -            self.cmd = cmd
    -        def __str__(self):
    -            return "Command '%s' returned non-zero exit status %d" % (self.cmd,
    -    self.returncode)
    -
    -    def call(*popenargs, **kwargs):
    -        """Run command with arguments.  Wait for command to complete, then
    -        return the returncode attribute.
    -
    -        The arguments are the same as for the Popen constructor.  Example:
    -
    -        retcode = call(["ls", "-l"])
    -        """
    -        # workaround: subprocess.Popen(cmd, stdout=sys.stdout) fails
    -        # see http://bugs.python.org/issue1531862
    -        if "stdout" in kwargs:
    -            fileno = kwargs.get("stdout").fileno()
    -            del kwargs['stdout']
    -            return Popen(stdout=os.dup(fileno), *popenargs, **kwargs).wait()
    -        return Popen(*popenargs, **kwargs).wait()
    -
    -    def check_call(*popenargs, **kwargs):
    -        """Run command with arguments.  Wait for command to complete.  If
    -        the exit code was zero then return, otherwise raise
    -        CalledProcessError.  The CalledProcessError object will have the
    -        return code in the returncode attribute.
    -
    -        The arguments are the same as for the Popen constructor.  Example:
    -
    -        check_call(["ls", "-l"])
    -        """
    -        retcode = call(*popenargs, **kwargs)
    -        cmd = kwargs.get("args")
    -        if cmd is None:
    -            cmd = popenargs[0]
    -        if retcode:
    -            raise CalledProcessError(retcode, cmd)
    -        return retcode
    -
    -try:
    -    from os.path import relpath
    -except ImportError: # python < 2.6
    -    from os.path import curdir, abspath, sep, commonprefix, pardir, join
    -    def relpath(path, start=curdir):
    -        """Return a relative version of a path"""
    -
    -        if not path:
    -            raise ValueError("no path specified")
    -
    -        start_list = abspath(start).split(sep)
    -        path_list = abspath(path).split(sep)
    -
    -        # Work out how much of the filepath is shared by start and path.
    -        i = len(commonprefix([start_list, path_list]))
    -
    -        rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
    -        if not rel_list:
    -            return curdir
    -        return join(*rel_list)
    -
    -
    -# XXX don't know why tests don't pass if I don't do that :
    -_real_set, set = set, deprecated('set exists in builtins since py2.4')(set)
    -if (2, 5) <= sys.version_info[:2]:
    -    InheritableSet = _real_set
    -else:
    -    class InheritableSet(_real_set):
    -        """hacked resolving inheritancy issue from old style class in 2.4"""
    -        def __new__(cls, *args, **kwargs):
    -            if args:
    -                new_args = (args[0], )
    -            else:
    -                new_args = ()
    -            obj = _real_set.__new__(cls, *new_args)
    -            obj.__init__(*args, **kwargs)
    -            return obj
    -
    -# XXX shouldn't we remove this and just let 2to3 do his job ?
    -# range or xrange?
    -try:
    -    range = xrange
    -except NameError:
    -    range = range
    -
    -# ConfigParser was renamed to the more-standard configparser
    -try:
    -    import configparser
    -except ImportError:
    -    import ConfigParser as configparser
    -
    -try:
    -    import json
    -except ImportError:
    -    try:
    -        import simplejson as json
    -    except ImportError:
    -        json = None
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/main.py b/pymode/libs/pylama/lint/pylama_pylint/main.py
    index 411ba31d..f50b6647 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/main.py
    +++ b/pymode/libs/pylama/lint/pylama_pylint/main.py
    @@ -1,12 +1,10 @@
     """ Pylint support. """
     from os import path as op, environ
    -import sys
     import logging
     
     from pylama.lint import Linter as BaseLinter
     
     CURDIR = op.abspath(op.dirname(__file__))
    -sys.path.insert(0, CURDIR)
     
     from astroid import MANAGER
     from pylint.lint import Run
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py
    deleted file mode 100644
    index a1c31337..00000000
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/stdlib.py
    +++ /dev/null
    @@ -1,69 +0,0 @@
    -# Copyright 2012 Google Inc.
    -#
    -# http://www.logilab.fr/ -- mailto:contact@logilab.fr
    -# This program is free software; you can redistribute it and/or modify it under
    -# the terms of the GNU General Public License as published by the Free Software
    -# Foundation; either version 2 of the License, or (at your option) any later
    -# version.
    -#
    -# This program is distributed in the hope that it will be useful, but WITHOUT
    -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
    -#
    -# You should have received a copy of the GNU General Public License along with
    -# this program; if not, write to the Free Software Foundation, Inc.,
    -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    -"""Checkers for various standard library functions."""
    -
    -import re
    -import sys
    -
    -import astroid
    -
    -from pylint.interfaces import IAstroidChecker
    -from pylint.checkers import BaseChecker
    -from pylint.checkers import utils
    -
    -_VALID_OPEN_MODE_REGEX = r'^(r?U|[rwa]\+?b?)$'
    -
    -if sys.version_info >= (3, 0):
    -    OPEN_MODULE = '_io'
    -else:
    -    OPEN_MODULE = '__builtin__'
    -
    -class OpenModeChecker(BaseChecker):
    -    __implements__ = (IAstroidChecker,)
    -    name = 'open_mode'
    -
    -    msgs = {
    -        'W1501': ('"%s" is not a valid mode for open.',
    -                  'bad-open-mode',
    -                  'Python supports: r, w, a modes with b, +, and U options. '
    -                  'See http://docs.python.org/2/library/functions.html#open'),
    -        }
    -
    -    @utils.check_messages('bad-open-mode')
    -    def visit_callfunc(self, node):
    -        """Visit a CallFunc node."""
    -        if hasattr(node, 'func'):
    -            infer = utils.safe_infer(node.func)
    -            if infer and infer.root().name == OPEN_MODULE:
    -                if getattr(node.func, 'name', None) in ('open', 'file'):
    -                    self._check_open_mode(node)
    -
    -    def _check_open_mode(self, node):
    -        """Check that the mode argument of an open or file call is valid."""
    -        try:
    -            mode_arg = utils.get_argument_from_call(node, position=1, keyword='mode')
    -            if mode_arg:
    -                mode_arg = utils.safe_infer(mode_arg)
    -                if (isinstance(mode_arg, astroid.Const)
    -                    and not re.match(_VALID_OPEN_MODE_REGEX, mode_arg.value)):
    -                    self.add_message('bad-open-mode', node=node, args=(mode_arg.value))
    -        except (utils.NoSuchArgumentError, TypeError):
    -            pass
    -
    -def register(linter):
    -    """required method to auto register this checker """
    -    linter.register_checker(OpenModeChecker(linter))
    -
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py b/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py
    deleted file mode 100644
    index 04cf1bc7..00000000
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/strings.py
    +++ /dev/null
    @@ -1,304 +0,0 @@
    -# Copyright (c) 2009-2010 Arista Networks, Inc. - James Lingard
    -# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
    -# Copyright 2012 Google Inc.
    -#
    -# http://www.logilab.fr/ -- mailto:contact@logilab.fr
    -# This program is free software; you can redistribute it and/or modify it under
    -# the terms of the GNU General Public License as published by the Free Software
    -# Foundation; either version 2 of the License, or (at your option) any later
    -# version.
    -#
    -# This program is distributed in the hope that it will be useful, but WITHOUT
    -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
    -#
    -# You should have received a copy of the GNU General Public License along with
    -# this program; if not, write to the Free Software Foundation, Inc.,
    -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    -"""Checker for string formatting operations.
    -"""
    -
    -import sys
    -import tokenize
    -
    -import astroid
    -
    -from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
    -from pylint.checkers import BaseChecker, BaseTokenChecker
    -from pylint.checkers import utils
    -from pylint.checkers.utils import check_messages
    -
    -_PY3K = sys.version_info >= (3, 0)
    -
    -MSGS = {
    -    'E1300': ("Unsupported format character %r (%#02x) at index %d",
    -              "bad-format-character",
    -              "Used when a unsupported format character is used in a format\
    -              string."),
    -    'E1301': ("Format string ends in middle of conversion specifier",
    -              "truncated-format-string",
    -              "Used when a format string terminates before the end of a \
    -              conversion specifier."),
    -    'E1302': ("Mixing named and unnamed conversion specifiers in format string",
    -              "mixed-format-string",
    -              "Used when a format string contains both named (e.g. '%(foo)d') \
    -              and unnamed (e.g. '%d') conversion specifiers.  This is also \
    -              used when a named conversion specifier contains * for the \
    -              minimum field width and/or precision."),
    -    'E1303': ("Expected mapping for format string, not %s",
    -              "format-needs-mapping",
    -              "Used when a format string that uses named conversion specifiers \
    -              is used with an argument that is not a mapping."),
    -    'W1300': ("Format string dictionary key should be a string, not %s",
    -              "bad-format-string-key",
    -              "Used when a format string that uses named conversion specifiers \
    -              is used with a dictionary whose keys are not all strings."),
    -    'W1301': ("Unused key %r in format string dictionary",
    -              "unused-format-string-key",
    -              "Used when a format string that uses named conversion specifiers \
    -              is used with a dictionary that conWtains keys not required by the \
    -              format string."),
    -    'E1304': ("Missing key %r in format string dictionary",
    -              "missing-format-string-key",
    -              "Used when a format string that uses named conversion specifiers \
    -              is used with a dictionary that doesn't contain all the keys \
    -              required by the format string."),
    -    'E1305': ("Too many arguments for format string",
    -              "too-many-format-args",
    -              "Used when a format string that uses unnamed conversion \
    -              specifiers is given too many arguments."),
    -    'E1306': ("Not enough arguments for format string",
    -              "too-few-format-args",
    -              "Used when a format string that uses unnamed conversion \
    -              specifiers is given too few arguments"),
    -    }
    -
    -OTHER_NODES = (astroid.Const, astroid.List, astroid.Backquote,
    -               astroid.Lambda, astroid.Function,
    -               astroid.ListComp, astroid.SetComp, astroid.GenExpr)
    -
    -class StringFormatChecker(BaseChecker):
    -    """Checks string formatting operations to ensure that the format string
    -    is valid and the arguments match the format string.
    -    """
    -
    -    __implements__ = (IAstroidChecker,)
    -    name = 'string'
    -    msgs = MSGS
    -
    -    @check_messages(*(MSGS.keys()))
    -    def visit_binop(self, node):
    -        if node.op != '%':
    -            return
    -        left = node.left
    -        args = node.right
    -
    -        if not (isinstance(left, astroid.Const)
    -            and isinstance(left.value, basestring)):
    -            return
    -        format_string = left.value
    -        try:
    -            required_keys, required_num_args = \
    -                utils.parse_format_string(format_string)
    -        except utils.UnsupportedFormatCharacter, e:
    -            c = format_string[e.index]
    -            self.add_message('bad-format-character', node=node, args=(c, ord(c), e.index))
    -            return
    -        except utils.IncompleteFormatString:
    -            self.add_message('truncated-format-string', node=node)
    -            return
    -        if required_keys and required_num_args:
    -            # The format string uses both named and unnamed format
    -            # specifiers.
    -            self.add_message('mixed-format-string', node=node)
    -        elif required_keys:
    -            # The format string uses only named format specifiers.
    -            # Check that the RHS of the % operator is a mapping object
    -            # that contains precisely the set of keys required by the
    -            # format string.
    -            if isinstance(args, astroid.Dict):
    -                keys = set()
    -                unknown_keys = False
    -                for k, _ in args.items:
    -                    if isinstance(k, astroid.Const):
    -                        key = k.value
    -                        if isinstance(key, basestring):
    -                            keys.add(key)
    -                        else:
    -                            self.add_message('bad-format-string-key', node=node, args=key)
    -                    else:
    -                        # One of the keys was something other than a
    -                        # constant.  Since we can't tell what it is,
    -                        # supress checks for missing keys in the
    -                        # dictionary.
    -                        unknown_keys = True
    -                if not unknown_keys:
    -                    for key in required_keys:
    -                        if key not in keys:
    -                            self.add_message('missing-format-string-key', node=node, args=key)
    -                for key in keys:
    -                    if key not in required_keys:
    -                        self.add_message('unused-format-string-key', node=node, args=key)
    -            elif isinstance(args, OTHER_NODES + (astroid.Tuple,)):
    -                type_name = type(args).__name__
    -                self.add_message('format-needs-mapping', node=node, args=type_name)
    -            # else:
    -                # The RHS of the format specifier is a name or
    -                # expression.  It may be a mapping object, so
    -                # there's nothing we can check.
    -        else:
    -            # The format string uses only unnamed format specifiers.
    -            # Check that the number of arguments passed to the RHS of
    -            # the % operator matches the number required by the format
    -            # string.
    -            if isinstance(args, astroid.Tuple):
    -                num_args = len(args.elts)
    -            elif isinstance(args, OTHER_NODES + (astroid.Dict, astroid.DictComp)):
    -                num_args = 1
    -            else:
    -                # The RHS of the format specifier is a name or
    -                # expression.  It could be a tuple of unknown size, so
    -                # there's nothing we can check.
    -                num_args = None
    -            if num_args is not None:
    -                if num_args > required_num_args:
    -                    self.add_message('too-many-format-args', node=node)
    -                elif num_args < required_num_args:
    -                    self.add_message('too-few-format-args', node=node)
    -
    -
    -class StringMethodsChecker(BaseChecker):
    -    __implements__ = (IAstroidChecker,)
    -    name = 'string'
    -    msgs = {
    -        'E1310': ("Suspicious argument in %s.%s call",
    -                  "bad-str-strip-call",
    -                  "The argument to a str.{l,r,}strip call contains a"
    -                  " duplicate character, "),
    -        }
    -
    -    @check_messages(*(MSGS.keys()))
    -    def visit_callfunc(self, node):
    -        func = utils.safe_infer(node.func)
    -        if (isinstance(func, astroid.BoundMethod)
    -            and isinstance(func.bound, astroid.Instance)
    -            and func.bound.name in ('str', 'unicode', 'bytes')
    -            and func.name in ('strip', 'lstrip', 'rstrip')
    -            and node.args):
    -            arg = utils.safe_infer(node.args[0])
    -            if not isinstance(arg, astroid.Const):
    -                return
    -            if len(arg.value) != len(set(arg.value)):
    -                self.add_message('bad-str-strip-call', node=node,
    -                                 args=(func.bound.name, func.name))
    -
    -
    -class StringConstantChecker(BaseTokenChecker):
    -    """Check string literals"""
    -    __implements__ = (ITokenChecker, IRawChecker)
    -    name = 'string_constant'
    -    msgs = {
    -        'W1401': ('Anomalous backslash in string: \'%s\'. '
    -                  'String constant might be missing an r prefix.',
    -                  'anomalous-backslash-in-string',
    -                  'Used when a backslash is in a literal string but not as an '
    -                  'escape.'),
    -        'W1402': ('Anomalous Unicode escape in byte string: \'%s\'. '
    -                  'String constant might be missing an r or u prefix.',
    -                  'anomalous-unicode-escape-in-string',
    -                  'Used when an escape like \\u is encountered in a byte '
    -                  'string where it has no effect.'),
    -        }
    -
    -    # Characters that have a special meaning after a backslash in either
    -    # Unicode or byte strings.
    -    ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'\"01234567'
    -
    -    # TODO(mbp): Octal characters are quite an edge case today; people may
    -    # prefer a separate warning where they occur.  \0 should be allowed.
    -
    -    # Characters that have a special meaning after a backslash but only in
    -    # Unicode strings.
    -    UNICODE_ESCAPE_CHARACTERS = 'uUN'
    -
    -    def process_module(self, module):
    -        self._unicode_literals = 'unicode_literals' in module.future_imports
    -
    -    def process_tokens(self, tokens):
    -        for (tok_type, token, (start_row, start_col), _, _) in tokens:
    -            if tok_type == tokenize.STRING:
    -                # 'token' is the whole un-parsed token; we can look at the start
    -                # of it to see whether it's a raw or unicode string etc.
    -                self.process_string_token(token, start_row, start_col)
    -
    -    def process_string_token(self, token, start_row, start_col):
    -        for i, c in enumerate(token):
    -            if c in '\'\"':
    -                quote_char = c
    -                break
    -        prefix = token[:i].lower() #  markers like u, b, r.
    -        after_prefix = token[i:]
    -        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
    -            string_body = after_prefix[3:-3]
    -        else:
    -            string_body = after_prefix[1:-1]  # Chop off quotes
    -        # No special checks on raw strings at the moment.
    -        if 'r' not in prefix:
    -            self.process_non_raw_string_token(prefix, string_body,
    -                start_row, start_col)
    -
    -    def process_non_raw_string_token(self, prefix, string_body, start_row,
    -                                     start_col):
    -        """check for bad escapes in a non-raw string.
    -
    -        prefix: lowercase string of eg 'ur' string prefix markers.
    -        string_body: the un-parsed body of the string, not including the quote
    -        marks.
    -        start_row: integer line number in the source.
    -        start_col: integer column number in the source.
    -        """
    -        # Walk through the string; if we see a backslash then escape the next
    -        # character, and skip over it.  If we see a non-escaped character,
    -        # alert, and continue.
    -        #
    -        # Accept a backslash when it escapes a backslash, or a quote, or
    -        # end-of-line, or one of the letters that introduce a special escape
    -        # sequence 
    -        #
    -        # TODO(mbp): Maybe give a separate warning about the rarely-used
    -        # \a \b \v \f?
    -        #
    -        # TODO(mbp): We could give the column of the problem character, but
    -        # add_message doesn't seem to have a way to pass it through at present.
    -        i = 0
    -        while True:
    -            i = string_body.find('\\', i)
    -            if i == -1:
    -                break
    -            # There must be a next character; having a backslash at the end
    -            # of the string would be a SyntaxError.
    -            next_char = string_body[i+1]
    -            match = string_body[i:i+2]
    -            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
    -                if 'u' in prefix:
    -                    pass
    -                elif (_PY3K or self._unicode_literals) and 'b' not in prefix:
    -                    pass  # unicode by default
    -                else:
    -                    self.add_message('anomalous-unicode-escape-in-string', 
    -                                     line=start_row, args=(match, ))
    -            elif next_char not in self.ESCAPE_CHARACTERS:
    -                self.add_message('anomalous-backslash-in-string', 
    -                                 line=start_row, args=(match, ))
    -            # Whether it was a valid escape or not, backslash followed by
    -            # another character can always be consumed whole: the second
    -            # character can never be the start of a new backslash escape.
    -            i += 2
    -
    -
    -
    -def register(linter):
    -    """required method to auto register this checker """
    -    linter.register_checker(StringFormatChecker(linter))
    -    linter.register_checker(StringMethodsChecker(linter))
    -    linter.register_checker(StringConstantChecker(linter))
    diff --git a/pymode/libs/pylama/main.py b/pymode/libs/pylama/main.py
    index 9ce91c37..2cf2e929 100644
    --- a/pymode/libs/pylama/main.py
    +++ b/pymode/libs/pylama/main.py
    @@ -1,4 +1,4 @@
    -""" Pylama's shell support. """
    +"""Pylama's shell support."""
     
     from __future__ import absolute_import, with_statement
     
    @@ -6,11 +6,58 @@
     from os import walk, path as op
     
     from .config import parse_options, CURDIR, setup_logger
    -from .core import LOGGER
    +from .core import LOGGER, run
    +from .async import check_async
    +
    +
    +def check_path(options, rootdir=None, candidates=None, code=None):
    +    """Check path.
    +
    +    :param rootdir: Root directory (for making relative file paths)
    +    :param options: Parsed pylama options (from pylama.config.parse_options)
    +
    +    :returns: (list) Errors list
    +
    +    """
    +    if not candidates:
    +        candidates = []
    +        for path_ in options.paths:
    +            path = op.abspath(path_)
    +            if op.isdir(path):
    +                for root, _, files in walk(path):
    +                    candidates += [op.relpath(op.join(root, f), CURDIR) for f in files]
    +            else:
    +                candidates.append(path)
    +
    +    if rootdir is None:
    +        rootdir = path if op.isdir(path) else op.dirname(path)
    +
    +    paths = []
    +    for path in candidates:
    +
    +        if not options.force and not any(l.allow(path) for _, l in options.linters):
    +            continue
    +
    +        if not op.exists(path):
    +            continue
    +
    +        if options.skip and any(p.match(path) for p in options.skip):
    +            LOGGER.info('Skip path: %s', path)
    +            continue
    +
    +        paths.append(path)
    +
    +    if options.async:
    +        return check_async(paths, options, rootdir)
    +
    +    errors = []
    +    for path in paths:
    +        errors += run(path=path, code=code, rootdir=rootdir, options=options)
    +    return errors
     
     
     def shell(args=None, error=True):
    -    """ Endpoint for console.
    +    """Endpoint for console.
     
         Parse a command arguments, configuration files and run a checkers.
     
    @@ -30,49 +77,20 @@ def shell(args=None, error=True):
             from .hook import install_hook
             return install_hook(options.path)
     
    -    paths = [options.path]
    -
    -    if op.isdir(options.path):
    -        paths = []
    -        for root, _, files in walk(options.path):
    -            paths += [op.relpath(op.join(root, f), CURDIR) for f in files]
    +    return process_paths(options, error=error)
     
    -    return check_files(paths, options, error=error)
     
    +def process_paths(options, candidates=None, error=True):
    +    """Process files and log errors."""
    +    errors = check_path(options, rootdir=CURDIR, candidates=candidates)
     
    -def check_files(paths, options, rootpath=None, error=True):
    -    """ Check files.
    -
    -    :return list: list of errors
    -    :raise SystemExit:
    -
    -    """
    -    from .tasks import async_check_files
    -
    -    if rootpath is None:
    -        rootpath = CURDIR
    -
    -    pattern = "%(rel)s:%(lnum)s:%(col)s: %(text)s"
    +    pattern = "%(filename)s:%(lnum)s:%(col)s: %(text)s"
         if options.format == 'pylint':
    -        pattern = "%(rel)s:%(lnum)s: [%(type)s] %(text)s"
    -
    -    work_paths = []
    -    for path in paths:
    -
    -        if not options.force and not any(l.allow(path) for _, l in options.linters): # noqa
    -            continue
    -
    -        if not op.exists(path):
    -            continue
    -
    -        if options.skip and any(p.match(path) for p in options.skip):
    -            LOGGER.info('Skip path: %s', path)
    -            continue
    -        work_paths.append(path)
    -
    -    errors = async_check_files(work_paths, options, rootpath=rootpath)
    +        pattern = "%(filename)s:%(lnum)s: [%(type)s] %(text)s"
     
         for er in errors:
    +        if options.abspath:
    +            er._info['filename'] = op.abspath(er.filename)
             LOGGER.warning(pattern, er._info)
     
         if error:
    @@ -83,3 +101,5 @@ def check_files(paths, options, rootpath=None, error=True):
     
     if __name__ == '__main__':
         shell()
    +
    +# pylama:ignore=F0001
    diff --git a/pymode/libs/pylama/pytest.py b/pymode/libs/pylama/pytest.py
    index cbfe787d..eeaa58ce 100644
    --- a/pymode/libs/pylama/pytest.py
    +++ b/pymode/libs/pylama/pytest.py
    @@ -3,7 +3,7 @@
     
     from os import path as op
     
    -import py
    +import py # noqa
     import pytest
     
     
    @@ -59,11 +59,12 @@ def setup(self):
                 pytest.skip("file(s) previously passed Pylama checks")
     
         def runtest(self):
    -        call = py.io.StdCapture.call
    -        errors, out, err = call(check_file, self.fspath)
    -        # errors = check_file(self.fspath)
    +        errors = check_file(self.fspath)
             if errors:
    -            raise PylamaError(out, err)
    +            pattern = "%(filename)s:%(lnum)s:%(col)s: %(text)s"
    +            out = "\n".join([pattern % e._info for e in errors])
    +            raise PylamaError(out)
    +
             # update mtime only if test passed
             # otherwise failures would not be re-run next time
             if self.cache:
    @@ -76,11 +77,11 @@ def repr_failure(self, excinfo):
     
     
     def check_file(path):
    -    from pylama.main import parse_options, check_files
    +    from pylama.main import parse_options, process_paths
         from pylama.config import CURDIR
     
         options = parse_options()
         path = op.relpath(str(path), CURDIR)
    -    return check_files([path], options, error=False)
    +    return process_paths(options, candidates=[path], error=False)
     
    -# pylama:ignore=D,E1002,W0212
    +# pylama:ignore=D,E1002,W0212,F0001
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py b/pymode/libs/pylint/__init__.py
    similarity index 96%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py
    rename to pymode/libs/pylint/__init__.py
    index eed1b62f..82e557dc 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/__init__.py
    +++ b/pymode/libs/pylint/__init__.py
    @@ -15,6 +15,8 @@
     # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
     import sys
     
    +from .__pkginfo__ import version as __version__
    +
     def run_pylint():
         """run pylint"""
         from pylint.lint import Run
    diff --git a/pymode/libs/pylint/__main__.py b/pymode/libs/pylint/__main__.py
    new file mode 100644
    index 00000000..7716361d
    --- /dev/null
    +++ b/pymode/libs/pylint/__main__.py
    @@ -0,0 +1,3 @@
    +#!/usr/bin/env python
    +import pylint
    +pylint.run_pylint()
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py b/pymode/libs/pylint/__pkginfo__.py
    similarity index 90%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py
    rename to pymode/libs/pylint/__pkginfo__.py
    index 86488fa5..33ae5b64 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/__pkginfo__.py
    +++ b/pymode/libs/pylint/__pkginfo__.py
    @@ -15,18 +15,14 @@
     # this program; if not, write to the Free Software Foundation, Inc.,
     # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
     """pylint packaging information"""
    -import sys
    +from __future__ import absolute_import
     
     modname = distname = 'pylint'
     
    -numversion = (1, 2, 1)
    +numversion = (1, 4, 4)
     version = '.'.join([str(num) for num in numversion])
     
    -if sys.version_info < (2, 6):
    -    install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.1',
    -                        'StringFormat']
    -else:
    -    install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.1']
    +install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.3.6', 'six']
     
     license = 'GPL'
     description = "python code static checker"
    @@ -71,4 +67,4 @@
                for filename in ('pylint', 'pylint-gui', "symilar", "epylint",
                                 "pyreverse")]
     
    -include_dirs = ['test']
    +include_dirs = [join('pylint', 'test')]
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py b/pymode/libs/pylint/checkers/__init__.py
    similarity index 75%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py
    rename to pymode/libs/pylint/checkers/__init__.py
    index af7965be..51adb4d0 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/__init__.py
    +++ b/pymode/libs/pylint/checkers/__init__.py
    @@ -30,7 +30,9 @@
     12: logging
     13: string_format
     14: string_constant
    -15-50: not yet used: reserved for future internal checkers.
    +15: stdlib
    +16: python3
    +17-50: not yet used: reserved for future internal checkers.
     51-99: perhaps used: reserved for external checkers
     
     The raw_metrics checker has no number associated since it doesn't emit any
    @@ -42,11 +44,12 @@
     import tokenize
     import warnings
     
    -from astroid.utils import ASTWalker
     from logilab.common.configuration import OptionsProviderMixIn
     
     from pylint.reporters import diff_string
     from pylint.utils import register_plugins
    +from pylint.interfaces import UNDEFINED
    +
     
     def table_lines_from_stats(stats, old_stats, columns):
         """get values listed in  from  and ,
    @@ -56,7 +59,7 @@ def table_lines_from_stats(stats, old_stats, columns):
         lines = []
         for m_type in columns:
             new = stats[m_type]
    -        format = str
    +        format = str # pylint: disable=redefined-builtin
             if isinstance(new, float):
                 format = lambda num: '%.3f' % num
             old = old_stats.get(m_type)
    @@ -69,7 +72,7 @@ def table_lines_from_stats(stats, old_stats, columns):
         return lines
     
     
    -class BaseChecker(OptionsProviderMixIn, ASTWalker):
    +class BaseChecker(OptionsProviderMixIn):
         """base class for checkers"""
         # checker name (you may reuse an existing one)
         name = None
    @@ -81,20 +84,21 @@ class BaseChecker(OptionsProviderMixIn, ASTWalker):
         msgs = {}
         # reports issued by this checker
         reports = ()
    +    # mark this checker as enabled or not.
    +    enabled = True
     
         def __init__(self, linter=None):
             """checker instances should have the linter as argument
     
             linter is an object implementing ILinter
             """
    -        ASTWalker.__init__(self, self)
             self.name = self.name.lower()
             OptionsProviderMixIn.__init__(self)
             self.linter = linter
     
    -    def add_message(self, msg_id, line=None, node=None, args=None):
    +    def add_message(self, msg_id, line=None, node=None, args=None, confidence=UNDEFINED):
             """add a message of a given type"""
    -        self.linter.add_message(msg_id, line, node, args)
    +        self.linter.add_message(msg_id, line, node, args, confidence)
     
         # dummy methods implementing the IChecker interface
     
    @@ -105,31 +109,6 @@ def close(self):
             """called after visiting project (i.e set of modules)"""
     
     
    -class BaseRawChecker(BaseChecker):
    -    """base class for raw checkers"""
    -
    -    def process_module(self, node):
    -        """process a module
    -
    -        the module's content is accessible via the stream object
    -
    -        stream must implement the readline method
    -        """
    -        warnings.warn("Modules that need access to the tokens should "
    -                      "use the ITokenChecker interface.",
    -                      DeprecationWarning)
    -        stream = node.file_stream
    -        stream.seek(0) # XXX may be removed with astroid > 0.23
    -        if sys.version_info <= (3, 0):
    -            self.process_tokens(tokenize.generate_tokens(stream.readline))
    -        else:
    -            self.process_tokens(tokenize.tokenize(stream.readline))
    -
    -    def process_tokens(self, tokens):
    -        """should be overridden by subclasses"""
    -        raise NotImplementedError()
    -
    -
     class BaseTokenChecker(BaseChecker):
         """Base class for checkers that want to have access to the token stream."""
     
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py b/pymode/libs/pylint/checkers/base.py
    similarity index 64%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py
    rename to pymode/libs/pylint/checkers/base.py
    index 8136d0f3..6ce88251 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/base.py
    +++ b/pymode/libs/pylint/checkers/base.py
    @@ -16,13 +16,21 @@
     # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
     """basic checker for Python code"""
     
    +import collections
    +import itertools
     import sys
    -import astroid
    +import re
    +
    +import six
    +from six.moves import zip  # pylint: disable=redefined-builtin
    +
     from logilab.common.ureports import Table
    -from astroid import are_exclusive, InferenceError
    +
    +import astroid
     import astroid.bases
    +from astroid import are_exclusive, InferenceError
     
    -from pylint.interfaces import IAstroidChecker
    +from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
     from pylint.utils import EmptyReport
     from pylint.reporters import diff_string
     from pylint.checkers import BaseChecker
    @@ -34,12 +42,13 @@
         overrides_a_method,
         safe_infer,
         get_argument_from_call,
    +    has_known_bases,
         NoSuchArgumentError,
    +    is_import_error,
    +    unimplemented_abstract_methods,
         )
     
     
    -import re
    -
     # regex for class/function/variable/constant name
     CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
     MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$')
    @@ -53,16 +62,43 @@
                         ('__reversed__', ))
     
     PY33 = sys.version_info >= (3, 3)
    -BAD_FUNCTIONS = ['map', 'filter', 'apply']
    +PY3K = sys.version_info >= (3, 0)
    +BAD_FUNCTIONS = ['map', 'filter']
     if sys.version_info < (3, 0):
         BAD_FUNCTIONS.append('input')
    -    BAD_FUNCTIONS.append('file')
     
     # Name categories that are always consistent with all naming conventions.
     EXEMPT_NAME_CATEGORIES = set(('exempt', 'ignore'))
     
    +# A mapping from builtin-qname -> symbol, to be used when generating messages
    +# about dangerous default values as arguments
    +DEFAULT_ARGUMENT_SYMBOLS = dict(
    +    zip(['.'.join([astroid.bases.BUILTINS, x]) for x in ('set', 'dict', 'list')],
    +        ['set()', '{}', '[]'])
    +)
    +
     del re
     
    +def _redefines_import(node):
    +    """ Detect that the given node (AssName) is inside an
    +    exception handler and redefines an import from the tryexcept body.
    +    Returns True if the node redefines an import, False otherwise.
    +    """
    +    current = node
    +    while current and not isinstance(current.parent, astroid.ExceptHandler):
    +        current = current.parent
    +    if not current or not is_import_error(current.parent):
    +        return False
    +    try_block = current.parent.parent
    +    for import_node in try_block.nodes_of_class((astroid.From, astroid.Import)):
    +        for name, alias in import_node.names:
    +            if alias:
    +                if alias == node.name:
    +                    return True
    +            elif name == node.name:
    +                return True
    +    return False
    +
     def in_loop(node):
         """return True if the node is inside a kind of for loop"""
         parent = node.parent
    @@ -93,6 +129,7 @@ def _loop_exits_early(loop):
         for child in loop.body:
             if isinstance(child, loop_nodes):
                 # break statement may be in orelse of child loop.
    +            # pylint: disable=superfluous-parens
                 for orelse in (child.orelse or ()):
                     for _ in orelse.nodes_of_class(astroid.Break, skip_klass=loop_nodes):
                         return True
    @@ -101,12 +138,18 @@ def _loop_exits_early(loop):
                 return True
         return False
     
    +def _is_multi_naming_match(match, node_type, confidence):
    +    return (match is not None and
    +            match.lastgroup is not None and
    +            match.lastgroup not in EXEMPT_NAME_CATEGORIES
    +            and (node_type != 'method' or confidence != INFERENCE_FAILURE))
    +
    +
     if sys.version_info < (3, 0):
         PROPERTY_CLASSES = set(('__builtin__.property', 'abc.abstractproperty'))
     else:
         PROPERTY_CLASSES = set(('builtins.property', 'abc.abstractproperty'))
    -ABC_METHODS = set(('abc.abstractproperty', 'abc.abstractmethod',
    -                   'abc.abstractclassmethod', 'abc.abstractstaticmethod'))
    +
     
     def _determine_function_name_type(node):
         """Determine the name type whose regex the a function's name should match.
    @@ -124,8 +167,8 @@ def _determine_function_name_type(node):
             # If the function is a property (decorated with @property
             # or @abc.abstractproperty), the name type is 'attr'.
             if (isinstance(decorator, astroid.Name) or
    -            (isinstance(decorator, astroid.Getattr) and
    -             decorator.attrname == 'abstractproperty')):
    +                (isinstance(decorator, astroid.Getattr) and
    +                 decorator.attrname == 'abstractproperty')):
                 infered = safe_infer(decorator)
                 if infered and infered.qname() in PROPERTY_CLASSES:
                     return 'attr'
    @@ -136,25 +179,17 @@ def _determine_function_name_type(node):
                 return 'attr'
         return 'method'
     
    -def decorated_with_abc(func):
    -    """ Determine if the `func` node is decorated
    -    with `abc` decorators (abstractmethod et co.)
    +
    +
    +def _has_abstract_methods(node):
         """
    -    if func.decorators:
    -        for node in func.decorators.nodes:
    -            try:
    -                infered = node.infer().next()
    -            except InferenceError:
    -                continue
    -            if infered and infered.qname() in ABC_METHODS:
    -                return True
    +    Determine if the given `node` has abstract methods.
     
    -def has_abstract_methods(node):
    -    """ Determine if the given `node` has
    -    abstract methods, defined with `abc` module.
    +    The methods should be made abstract by decorating them
    +    with `abc` decorators.
         """
    -    return any(decorated_with_abc(meth)
    -               for meth in node.mymethods())
    +    return len(unimplemented_abstract_methods(node)) > 0
    +
     
     def report_by_type_stats(sect, stats, old_stats):
         """make a report of
    @@ -208,7 +243,7 @@ def x(self, value): self._x = value
         if node.decorators:
             for decorator in node.decorators.nodes:
                 if (isinstance(decorator, astroid.Getattr) and
    -                getattr(decorator.expr, 'name', None) == node.name):
    +                    getattr(decorator.expr, 'name', None) == node.name):
                     return True
         return False
     
    @@ -218,57 +253,52 @@ class _BasicChecker(BaseChecker):
     
     class BasicErrorChecker(_BasicChecker):
         msgs = {
    -    'E0100': ('__init__ method is a generator',
    -              'init-is-generator',
    -              'Used when the special class method __init__ is turned into a '
    -              'generator by a yield in its body.'),
    -    'E0101': ('Explicit return in __init__',
    -              'return-in-init',
    -              'Used when the special class method __init__ has an explicit \
    -              return value.'),
    -    'E0102': ('%s already defined line %s',
    -              'function-redefined',
    -              'Used when a function / class / method is redefined.'),
    -    'E0103': ('%r not properly in loop',
    -              'not-in-loop',
    -              'Used when break or continue keywords are used outside a loop.'),
    -
    -    'E0104': ('Return outside function',
    -              'return-outside-function',
    -              'Used when a "return" statement is found outside a function or '
    -              'method.'),
    -    'E0105': ('Yield outside function',
    -              'yield-outside-function',
    -              'Used when a "yield" statement is found outside a function or '
    -              'method.'),
    -    'E0106': ('Return with argument inside generator',
    -              'return-arg-in-generator',
    -              'Used when a "return" statement with an argument is found '
    -              'outside in a generator function or method (e.g. with some '
    -              '"yield" statements).',
    -              {'maxversion': (3, 3)}),
    -    'E0107': ("Use of the non-existent %s operator",
    -              'nonexistent-operator',
    -              "Used when you attempt to use the C-style pre-increment or"
    -              "pre-decrement operator -- and ++, which doesn't exist in Python."),
    -    'E0108': ('Duplicate argument name %s in function definition',
    -              'duplicate-argument-name',
    -              'Duplicate argument names in function definitions are syntax'
    -              ' errors.'),
    -    'E0110': ('Abstract class with abstract methods instantiated',
    -              'abstract-class-instantiated',
    -              'Used when an abstract class with `abc.ABCMeta` as metaclass '
    -              'has abstract methods and is instantiated.',
    -              {'minversion': (3, 0)}),
    -    'W0120': ('Else clause on loop without a break statement',
    -              'useless-else-on-loop',
    -              'Loops should only have an else clause if they can exit early '
    -              'with a break statement, otherwise the statements under else '
    -              'should be on the same scope as the loop itself.'),
    -    }
    -
    -    def __init__(self, linter):
    -        _BasicChecker.__init__(self, linter)
    +        'E0100': ('__init__ method is a generator',
    +                  'init-is-generator',
    +                  'Used when the special class method __init__ is turned into a '
    +                  'generator by a yield in its body.'),
    +        'E0101': ('Explicit return in __init__',
    +                  'return-in-init',
    +                  'Used when the special class method __init__ has an explicit '
    +                  'return value.'),
    +        'E0102': ('%s already defined line %s',
    +                  'function-redefined',
    +                  'Used when a function / class / method is redefined.'),
    +        'E0103': ('%r not properly in loop',
    +                  'not-in-loop',
    +                  'Used when break or continue keywords are used outside a loop.'),
    +        'E0104': ('Return outside function',
    +                  'return-outside-function',
    +                  'Used when a "return" statement is found outside a function or '
    +                  'method.'),
    +        'E0105': ('Yield outside function',
    +                  'yield-outside-function',
    +                  'Used when a "yield" statement is found outside a function or '
    +                  'method.'),
    +        'E0106': ('Return with argument inside generator',
    +                  'return-arg-in-generator',
    +                  'Used when a "return" statement with an argument is found '
    +                  'outside in a generator function or method (e.g. with some '
    +                  '"yield" statements).',
    +                  {'maxversion': (3, 3)}),
    +        'E0107': ("Use of the non-existent %s operator",
    +                  'nonexistent-operator',
     +                  "Used when you attempt to use the C-style pre-increment or "
    +                  "pre-decrement operator -- and ++, which doesn't exist in Python."),
    +        'E0108': ('Duplicate argument name %s in function definition',
    +                  'duplicate-argument-name',
    +                  'Duplicate argument names in function definitions are syntax'
    +                  ' errors.'),
    +        'E0110': ('Abstract class %r with abstract methods instantiated',
    +                  'abstract-class-instantiated',
    +                  'Used when an abstract class with `abc.ABCMeta` as metaclass '
    +                  'has abstract methods and is instantiated.'),
    +        'W0120': ('Else clause on loop without a break statement',
    +                  'useless-else-on-loop',
    +                  'Loops should only have an else clause if they can exit early '
    +                  'with a break statement, otherwise the statements under else '
    +                  'should be on the same scope as the loop itself.'),
    +        }
     
         @check_messages('function-redefined')
         def visit_class(self, node):
    @@ -289,11 +319,11 @@ def visit_function(self, node):
                 else:
                     values = [r.value for r in returns]
                     # Are we returning anything but None from constructors
    -                if  [v for v in values if
    -                     not (v is None or
    -                          (isinstance(v, astroid.Const) and v.value is None) or
    -                          (isinstance(v, astroid.Name)  and v.name == 'None')
    -                          )]:
    +                if [v for v in values
    +                        if not (v is None or
    +                                (isinstance(v, astroid.Const) and v.value is None) or
    +                                (isinstance(v, astroid.Name)  and v.name == 'None')
    +                               )]:
                         self.add_message('return-in-init', node=node)
             elif node.is_generator():
                 # make sure we don't mix non-None returns and yields
    @@ -342,38 +372,39 @@ def visit_while(self, node):
         def visit_unaryop(self, node):
             """check use of the non-existent ++ and -- operator operator"""
             if ((node.op in '+-') and
    -            isinstance(node.operand, astroid.UnaryOp) and
    -            (node.operand.op == node.op)):
    +                isinstance(node.operand, astroid.UnaryOp) and
    +                (node.operand.op == node.op)):
                 self.add_message('nonexistent-operator', node=node, args=node.op*2)
     
         @check_messages('abstract-class-instantiated')
         def visit_callfunc(self, node):
             """ Check instantiating abstract class with
    -        abc.ABCMeta as metaclass. 
    +        abc.ABCMeta as metaclass.
             """
             try:
    -            infered = node.func.infer().next()
    +            infered = next(node.func.infer())
             except astroid.InferenceError:
                 return
             if not isinstance(infered, astroid.Class):
                 return
             # __init__ was called
             metaclass = infered.metaclass()
    +        abstract_methods = _has_abstract_methods(infered)
             if metaclass is None:
                 # Python 3.4 has `abc.ABC`, which won't be detected
                 # by ClassNode.metaclass()
                 for ancestor in infered.ancestors():
    -                if (ancestor.qname() == 'abc.ABC' and
    -                    has_abstract_methods(infered)):
    -
    -                    self.add_message('abstract-class-instantiated', node=node)
    +                if ancestor.qname() == 'abc.ABC' and abstract_methods:
    +                    self.add_message('abstract-class-instantiated',
    +                                     args=(infered.name, ),
    +                                     node=node)
                         break
                 return
    -        if (metaclass.qname() == 'abc.ABCMeta' and
    -            has_abstract_methods(infered)):
    +        if metaclass.qname() == 'abc.ABCMeta' and abstract_methods:
    +            self.add_message('abstract-class-instantiated',
    +                             args=(infered.name, ),
    +                             node=node)
     
    -            self.add_message('abstract-class-instantiated', node=node)
    -   
         def _check_else_on_loop(self, node):
             """Check that any loop with an else clause has a break statement."""
             if node.orelse and not _loop_exits_early(node):
    @@ -417,88 +448,78 @@ class BasicChecker(_BasicChecker):
     
         name = 'basic'
         msgs = {
    -    'W0101': ('Unreachable code',
    -              'unreachable',
    -              'Used when there is some code behind a "return" or "raise" \
    -              statement, which will never be accessed.'),
    -    'W0102': ('Dangerous default value %s as argument',
    -              'dangerous-default-value',
    -              'Used when a mutable value as list or dictionary is detected in \
    -              a default value for an argument.'),
    -    'W0104': ('Statement seems to have no effect',
    -              'pointless-statement',
    -              'Used when a statement doesn\'t have (or at least seems to) \
    -              any effect.'),
    -    'W0105': ('String statement has no effect',
    -              'pointless-string-statement',
    -              'Used when a string is used as a statement (which of course \
    -              has no effect). This is a particular case of W0104 with its \
    -              own message so you can easily disable it if you\'re using \
    -              those strings as documentation, instead of comments.'),
    -    'W0106': ('Expression "%s" is assigned to nothing',
    -              'expression-not-assigned',
    -              'Used when an expression that is not a function call is assigned\
    -              to nothing. Probably something else was intended.'),
    -    'W0108': ('Lambda may not be necessary',
    -              'unnecessary-lambda',
    -              'Used when the body of a lambda expression is a function call \
    -              on the same argument list as the lambda itself; such lambda \
    -              expressions are in all but a few cases replaceable with the \
    -              function being called in the body of the lambda.'),
    -    'W0109': ("Duplicate key %r in dictionary",
    -              'duplicate-key',
    -              "Used when a dictionary expression binds the same key multiple \
    -              times."),
    -    'W0122': ('Use of exec',
    -              'exec-used',
    -              'Used when you use the "exec" statement (function for Python 3), to discourage its \
    -              usage. That doesn\'t mean you can not use it !'),
    -    'W0123': ('Use of eval',
    -              'eval-used',
    -              'Used when you use the "eval" function, to discourage its '
    -              'usage. Consider using `ast.literal_eval` for safely evaluating '
    -              'strings containing Python expressions '
    -              'from untrusted sources. '),
    -    'W0141': ('Used builtin function %r',
    -              'bad-builtin',
    -              'Used when a black listed builtin function is used (see the '
    -              'bad-function option). Usual black listed functions are the ones '
    -              'like map, or filter , where Python offers now some cleaner '
    -              'alternative like list comprehension.'),
    -    'W0142': ('Used * or ** magic',
    -              'star-args',
    -              'Used when a function or method is called using `*args` or '
    -              '`**kwargs` to dispatch arguments. This doesn\'t improve '
    -              'readability and should be used with care.'),
    -    'W0150': ("%s statement in finally block may swallow exception",
    -              'lost-exception',
    -              "Used when a break or a return statement is found inside the \
    -              finally clause of a try...finally block: the exceptions raised \
    -              in the try clause will be silently swallowed instead of being \
    -              re-raised."),
    -    'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?',
    -              'assert-on-tuple',
    -              'A call of assert on a tuple will always evaluate to true if '
    -              'the tuple is not empty, and will always evaluate to false if '
    -              'it is.'),
    -    'W0121': ('Use raise ErrorClass(args) instead of raise ErrorClass, args.',
    -              'old-raise-syntax',
    -              "Used when the alternate raise syntax 'raise foo, bar' is used "
    -              "instead of 'raise foo(bar)'.",
    -              {'maxversion': (3, 0)}),
    -
    -    'C0121': ('Missing required attribute "%s"', # W0103
    -              'missing-module-attribute',
    -              'Used when an attribute required for modules is missing.'),
    -
    -    'E0109': ('Missing argument to reversed()',
    -              'missing-reversed-argument',
    -              'Used when reversed() builtin didn\'t receive an argument.'),
    -    'E0111': ('The first reversed() argument is not a sequence',
    -              'bad-reversed-sequence',
    -              'Used when the first argument to reversed() builtin '
    -              'isn\'t a sequence (does not implement __reversed__, '
    -              'nor __getitem__ and __len__'),
    +        'W0101': ('Unreachable code',
    +                  'unreachable',
    +                  'Used when there is some code behind a "return" or "raise" '
    +                  'statement, which will never be accessed.'),
    +        'W0102': ('Dangerous default value %s as argument',
    +                  'dangerous-default-value',
    +                  'Used when a mutable value as list or dictionary is detected in '
    +                  'a default value for an argument.'),
    +        'W0104': ('Statement seems to have no effect',
    +                  'pointless-statement',
    +                  'Used when a statement doesn\'t have (or at least seems to) '
    +                  'any effect.'),
    +        'W0105': ('String statement has no effect',
    +                  'pointless-string-statement',
    +                  'Used when a string is used as a statement (which of course '
    +                  'has no effect). This is a particular case of W0104 with its '
    +                  'own message so you can easily disable it if you\'re using '
    +                  'those strings as documentation, instead of comments.'),
    +        'W0106': ('Expression "%s" is assigned to nothing',
    +                  'expression-not-assigned',
    +                  'Used when an expression that is not a function call is assigned '
    +                  'to nothing. Probably something else was intended.'),
    +        'W0108': ('Lambda may not be necessary',
    +                  'unnecessary-lambda',
    +                  'Used when the body of a lambda expression is a function call '
    +                  'on the same argument list as the lambda itself; such lambda '
    +                  'expressions are in all but a few cases replaceable with the '
    +                  'function being called in the body of the lambda.'),
    +        'W0109': ("Duplicate key %r in dictionary",
    +                  'duplicate-key',
    +                  'Used when a dictionary expression binds the same key multiple '
    +                  'times.'),
    +        'W0122': ('Use of exec',
    +                  'exec-used',
    +                  'Used when you use the "exec" statement (function for Python '
    +                  '3), to discourage its usage. That doesn\'t '
    +                  'mean you can not use it !'),
    +        'W0123': ('Use of eval',
    +                  'eval-used',
    +                  'Used when you use the "eval" function, to discourage its '
    +                  'usage. Consider using `ast.literal_eval` for safely evaluating '
    +                  'strings containing Python expressions '
    +                  'from untrusted sources. '),
    +        'W0141': ('Used builtin function %r',
    +                  'bad-builtin',
    +                  'Used when a black listed builtin function is used (see the '
    +                  'bad-function option). Usual black listed functions are the ones '
     +                  'like map, or filter, where Python now offers some cleaner '
    +                  'alternative like list comprehension.'),
    +        'W0150': ("%s statement in finally block may swallow exception",
    +                  'lost-exception',
    +                  'Used when a break or a return statement is found inside the '
    +                  'finally clause of a try...finally block: the exceptions raised '
    +                  'in the try clause will be silently swallowed instead of being '
    +                  're-raised.'),
    +        'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?',
    +                  'assert-on-tuple',
    +                  'A call of assert on a tuple will always evaluate to true if '
    +                  'the tuple is not empty, and will always evaluate to false if '
    +                  'it is.'),
    +        'C0121': ('Missing required attribute "%s"', # W0103
    +                  'missing-module-attribute',
    +                  'Used when an attribute required for modules is missing.'),
    +
    +        'E0109': ('Missing argument to reversed()',
    +                  'missing-reversed-argument',
    +                  'Used when reversed() builtin didn\'t receive an argument.'),
    +        'E0111': ('The first reversed() argument is not a sequence',
    +                  'bad-reversed-sequence',
    +                  'Used when the first argument to reversed() builtin '
    +                  'isn\'t a sequence (does not implement __reversed__, '
     +                  'nor __getitem__ and __len__).'),
     
         }
     
    @@ -507,14 +528,14 @@ class BasicChecker(_BasicChecker):
                      'metavar' : '',
                      'help' : 'Required attributes for module, separated by a '
                               'comma'}
    -                ),
    +               ),
                    ('bad-functions',
                     {'default' : BAD_FUNCTIONS,
                      'type' :'csv', 'metavar' : '',
                      'help' : 'List of builtins function names that should not be '
                               'used, separated by a comma'}
    -                ),
    -               )
    +               ),
    +              )
         reports = (('RP0101', 'Statistics by type', report_by_type_stats),)
     
         def __init__(self, linter):
    @@ -528,6 +549,7 @@ def open(self):
             self._tryfinallys = []
             self.stats = self.linter.add_stats(module=0, function=0,
                                                method=0, class_=0)
    +
         @check_messages('missing-module-attribute')
         def visit_module(self, node):
             """check module name, docstring and required arguments
    @@ -537,7 +559,7 @@ def visit_module(self, node):
                 if attr not in node:
                     self.add_message('missing-module-attribute', node=node, args=attr)
     
    -    def visit_class(self, node):
    +    def visit_class(self, node): # pylint: disable=unused-argument
             """check module name, docstring and redefinition
             increment branch counter
             """
    @@ -549,8 +571,20 @@ def visit_discard(self, node):
             """check for various kind of statements without effect"""
             expr = node.value
             if isinstance(expr, astroid.Const) and isinstance(expr.value,
    -                                                        basestring):
    +                                                          six.string_types):
                 # treat string statement in a separated message
    +            # Handle PEP-257 attribute docstrings.
    +            # An attribute docstring is defined as being a string right after
    +            # an assignment at the module level, class level or __init__ level.
    +            scope = expr.scope()
    +            if isinstance(scope, (astroid.Class, astroid.Module, astroid.Function)):
    +                if isinstance(scope, astroid.Function) and scope.name != '__init__':
    +                    pass
    +                else:
    +                    sibling = expr.previous_sibling()
    +                    if (sibling is not None and sibling.scope() is scope and
    +                            isinstance(sibling, astroid.Assign)):
    +                        return
                 self.add_message('pointless-string-statement', node=node)
                 return
             # ignore if this is :
    @@ -560,11 +594,12 @@ def visit_discard(self, node):
             # warn W0106 if we have any underlying function call (we can't predict
             # side effects), else pointless-statement
             if (isinstance(expr, (astroid.Yield, astroid.CallFunc)) or
    -            (isinstance(node.parent, astroid.TryExcept) and
    -             node.parent.body == [node])):
    +                (isinstance(node.parent, astroid.TryExcept) and
    +                 node.parent.body == [node])):
                 return
             if any(expr.nodes_of_class(astroid.CallFunc)):
    -            self.add_message('expression-not-assigned', node=node, args=expr.as_string())
    +            self.add_message('expression-not-assigned', node=node,
    +                             args=expr.as_string())
             else:
                 self.add_message('pointless-statement', node=node)
     
    @@ -597,15 +632,15 @@ def visit_lambda(self, node):
             ordinary_args = list(node.args.args)
             if node.args.kwarg:
                 if (not call.kwargs
    -                or not isinstance(call.kwargs, astroid.Name)
    -                or node.args.kwarg != call.kwargs.name):
    +                    or not isinstance(call.kwargs, astroid.Name)
    +                    or node.args.kwarg != call.kwargs.name):
                     return
             elif call.kwargs:
                 return
             if node.args.vararg:
                 if (not call.starargs
    -                or not isinstance(call.starargs, astroid.Name)
    -                or node.args.vararg != call.starargs.name):
    +                    or not isinstance(call.starargs, astroid.Name)
    +                    or node.args.vararg != call.starargs.name):
                     return
             elif call.starargs:
                 return
    @@ -613,11 +648,16 @@ def visit_lambda(self, node):
             # ordinary_args[i].name == call.args[i].name.
             if len(ordinary_args) != len(call.args):
                 return
    -        for i in xrange(len(ordinary_args)):
    +        for i in range(len(ordinary_args)):
                 if not isinstance(call.args[i], astroid.Name):
                     return
                 if node.args.args[i].name != call.args[i].name:
                     return
    +        if (isinstance(node.body.func, astroid.Getattr) and
    +                isinstance(node.body.func.expr, astroid.CallFunc)):
    +            # Chained call, the intermediate call might
    +            # return something else (but we don't check that, yet).
    +            return
             self.add_message('unnecessary-lambda', line=node.fromlineno, node=node)
     
         @check_messages('dangerous-default-value')
    @@ -626,22 +666,45 @@ def visit_function(self, node):
             variable names, max locals
             """
             self.stats[node.is_method() and 'method' or 'function'] += 1
    +        self._check_dangerous_default(node)
    +
    +    def _check_dangerous_default(self, node):
             # check for dangerous default values as arguments
    +        is_iterable = lambda n: isinstance(n, (astroid.List,
    +                                               astroid.Set,
    +                                               astroid.Dict))
             for default in node.args.defaults:
                 try:
    -                value = default.infer().next()
    +                value = next(default.infer())
                 except astroid.InferenceError:
                     continue
    -            builtins = astroid.bases.BUILTINS
    +
                 if (isinstance(value, astroid.Instance) and
    -                value.qname() in ['.'.join([builtins, x]) for x in ('set', 'dict', 'list')]):
    +                    value.qname() in DEFAULT_ARGUMENT_SYMBOLS):
    +
                     if value is default:
    -                    msg = default.as_string()
    -                elif type(value) is astroid.Instance:
    -                    msg = '%s (%s)' % (default.as_string(), value.qname())
    +                    msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
    +                elif type(value) is astroid.Instance or is_iterable(value):
    +                    # We are here in the following situation(s):
    +                    #   * a dict/set/list/tuple call which wasn't inferred
    +                    #     to a syntax node ({}, () etc.). This can happen
    +                    #     when the arguments are invalid or unknown to
    +                    #     the inference.
    +                    #   * a variable from somewhere else, which turns out to be a list
    +                    #     or a dict.
    +                    if is_iterable(default):
    +                        msg = value.pytype()
    +                    elif isinstance(default, astroid.CallFunc):
    +                        msg = '%s() (%s)' % (value.name, value.qname())
    +                    else:
    +                        msg = '%s (%s)' % (default.as_string(), value.qname())
                     else:
    -                    msg = '%s (%s)' % (default.as_string(), value.as_string())
    -                self.add_message('dangerous-default-value', node=node, args=(msg,))
    +                    # this argument is a name
    +                    msg = '%s (%s)' % (default.as_string(),
    +                                       DEFAULT_ARGUMENT_SYMBOLS[value.qname()])
    +                self.add_message('dangerous-default-value',
    +                                 node=node,
    +                                 args=(msg, ))
     
         @check_messages('unreachable', 'lost-exception')
         def visit_return(self, node):
    @@ -673,24 +736,20 @@ def visit_break(self, node):
             # 2 - Is it inside final body of a try...finally bloc ?
             self._check_not_in_finally(node, 'break', (astroid.For, astroid.While,))
     
    -    @check_messages('unreachable', 'old-raise-syntax')
    +    @check_messages('unreachable')
         def visit_raise(self, node):
             """check if the node has a right sibling (if so, that's some unreachable
             code)
             """
             self._check_unreachable(node)
    -        if sys.version_info >= (3, 0):
    -            return
    -        if node.exc is not None and node.inst is not None and node.tback is None:
    -            self.add_message('old-raise-syntax', node=node)
     
         @check_messages('exec-used')
         def visit_exec(self, node):
             """just print a warning on exec statements"""
             self.add_message('exec-used', node=node)
     
    -    @check_messages('bad-builtin', 'star-args', 'eval-used', 
    -                    'exec-used', 'missing-reversed-argument', 
    +    @check_messages('bad-builtin', 'eval-used',
    +                    'exec-used', 'missing-reversed-argument',
                         'bad-reversed-sequence')
         def visit_callfunc(self, node):
             """visit a CallFunc node -> check if this is not a blacklisted builtin
    @@ -710,24 +769,12 @@ def visit_callfunc(self, node):
                         self.add_message('eval-used', node=node)
                     if name in self.config.bad_functions:
                         self.add_message('bad-builtin', node=node, args=name)
    -        if node.starargs or node.kwargs:
    -            scope = node.scope()
    -            if isinstance(scope, astroid.Function):
    -                toprocess = [(n, vn) for (n, vn) in ((node.starargs, scope.args.vararg),
    -                                                     (node.kwargs, scope.args.kwarg)) if n]
    -                if toprocess:
    -                    for cfnode, fargname in toprocess[:]:
    -                        if getattr(cfnode, 'name', None) == fargname:
    -                            toprocess.remove((cfnode, fargname))
    -                    if not toprocess:
    -                        return # star-args can be skipped
    -            self.add_message('star-args', node=node.func)
     
         @check_messages('assert-on-tuple')
         def visit_assert(self, node):
             """check the use of an assert statement on a tuple."""
             if node.fail is None and isinstance(node.test, astroid.Tuple) and \
    -           len(node.test.elts) == 2:
    +                len(node.test.elts) == 2:
                 self.add_message('assert-on-tuple', node=node)
     
         @check_messages('duplicate-key')
    @@ -745,7 +792,7 @@ def visit_tryfinally(self, node):
             """update try...finally flag"""
             self._tryfinallys.append(node)
     
    -    def leave_tryfinally(self, node):
    +    def leave_tryfinally(self, node): # pylint: disable=unused-argument
             """update try...finally flag"""
             self._tryfinallys.pop()
     
    @@ -772,7 +819,7 @@ def _check_not_in_finally(self, node, node_name, breaker_classes=()):
                     return
                 _node = _parent
                 _parent = _node.parent
    -    
    +
         def _check_reversed(self, node):
             """ check that the argument to `reversed` is a sequence """
             try:
    @@ -783,25 +830,25 @@ def _check_reversed(self, node):
                 if argument is astroid.YES:
                     return
                 if argument is None:
    -                # nothing was infered
    -                # try to see if we have iter()
    +                # Nothing was infered.
    +                # Try to see if we have iter().
                     if isinstance(node.args[0], astroid.CallFunc):
                         try:
    -                        func = node.args[0].func.infer().next()
    +                        func = next(node.args[0].func.infer())
                         except InferenceError:
                             return
                         if (getattr(func, 'name', None) == 'iter' and
    -                        is_builtin_object(func)):
    +                            is_builtin_object(func)):
                             self.add_message('bad-reversed-sequence', node=node)
                     return
     
                 if isinstance(argument, astroid.Instance):
    -                if (argument._proxied.name == 'dict' and 
    -                    is_builtin_object(argument._proxied)):
    -                     self.add_message('bad-reversed-sequence', node=node)
    -                     return
    +                if (argument._proxied.name == 'dict' and
    +                        is_builtin_object(argument._proxied)):
    +                    self.add_message('bad-reversed-sequence', node=node)
    +                    return
                     elif any(ancestor.name == 'dict' and is_builtin_object(ancestor)
    -                       for ancestor in argument._proxied.ancestors()):
    +                         for ancestor in argument._proxied.ancestors()):
                         # mappings aren't accepted by reversed()
                         self.add_message('bad-reversed-sequence', node=node)
                         return
    @@ -814,10 +861,10 @@ def _check_reversed(self, node):
                                 break
                         else:
                             break
    -                else:             
    -                    # check if it is a .deque. It doesn't seem that
    -                    # we can retrieve special methods 
    -                    # from C implemented constructs    
    +                else:
    +                    # Check if it is a .deque. It doesn't seem that
    +                    # we can retrieve special methods
    +                    # from C implemented constructs.
                         if argument._proxied.qname().endswith(".deque"):
                             return
                         self.add_message('bad-reversed-sequence', node=node)
    @@ -840,62 +887,61 @@ def _check_reversed(self, node):
     
     def _create_naming_options():
         name_options = []
    -    for name_type, (rgx, human_readable_name) in _NAME_TYPES.iteritems():
    +    for name_type, (rgx, human_readable_name) in six.iteritems(_NAME_TYPES):
             name_type = name_type.replace('_', '-')
             name_options.append((
    -            '%s-rgx' % (name_type,), 
    +            '%s-rgx' % (name_type,),
                 {'default': rgx, 'type': 'regexp', 'metavar': '',
                  'help': 'Regular expression matching correct %s names' % (human_readable_name,)}))
             name_options.append((
    -            '%s-name-hint' % (name_type,), 
    +            '%s-name-hint' % (name_type,),
                 {'default': rgx.pattern, 'type': 'string', 'metavar': '',
                  'help': 'Naming hint for %s names' % (human_readable_name,)}))
    -
    -    return tuple(name_options) 
    +    return tuple(name_options)
     
     class NameChecker(_BasicChecker):
         msgs = {
    -    'C0102': ('Black listed name "%s"',
    -              'blacklisted-name',
    -              'Used when the name is listed in the black list (unauthorized \
    -              names).'),
    -    'C0103': ('Invalid %s name "%s"%s',
    -              'invalid-name',
    -              'Used when the name doesn\'t match the regular expression \
    -              associated to its type (constant, variable, class...).'),
    +        'C0102': ('Black listed name "%s"',
    +                  'blacklisted-name',
    +                  'Used when the name is listed in the black list (unauthorized '
    +                  'names).'),
    +        'C0103': ('Invalid %s name "%s"%s',
    +                  'invalid-name',
    +                  'Used when the name doesn\'t match the regular expression '
    +                  'associated to its type (constant, variable, class...).'),
         }
     
    -    options = (# XXX use set
    -               ('good-names',
    +    options = (('good-names',
                     {'default' : ('i', 'j', 'k', 'ex', 'Run', '_'),
                      'type' :'csv', 'metavar' : '',
                      'help' : 'Good variable names which should always be accepted,'
                               ' separated by a comma'}
    -                ),
    +               ),
                    ('bad-names',
                     {'default' : ('foo', 'bar', 'baz', 'toto', 'tutu', 'tata'),
                      'type' :'csv', 'metavar' : '',
                      'help' : 'Bad variable names which should always be refused, '
                               'separated by a comma'}
    -                ),
    +               ),
                    ('name-group',
                     {'default' : (),
                      'type' :'csv', 'metavar' : '',
                      'help' : ('Colon-delimited sets of names that determine each'
                                ' other\'s naming style when the name regexes'
                                ' allow several styles.')}
    -                ),
    +               ),
                    ('include-naming-hint',
                     {'default': False, 'type' : 'yn', 'metavar' : '',
                      'help': 'Include a hint for the correct naming format with invalid-name'}
    -                ),
    -               ) + _create_naming_options()
    +               ),
    +              ) + _create_naming_options()
     
     
         def __init__(self, linter):
             _BasicChecker.__init__(self, linter)
             self._name_category = {}
             self._name_group = {}
    +        self._bad_names = {}
     
         def open(self):
             self.stats = self.linter.add_stats(badname_module=0,
    @@ -913,11 +959,30 @@ def open(self):
         @check_messages('blacklisted-name', 'invalid-name')
         def visit_module(self, node):
             self._check_name('module', node.name.split('.')[-1], node)
    +        self._bad_names = {}
    +
    +    def leave_module(self, node): # pylint: disable=unused-argument
    +        for all_groups in six.itervalues(self._bad_names):
    +            if len(all_groups) < 2:
    +                continue
    +            groups = collections.defaultdict(list)
    +            min_warnings = sys.maxsize
    +            for group in six.itervalues(all_groups):
    +                groups[len(group)].append(group)
    +                min_warnings = min(len(group), min_warnings)
    +            if len(groups[min_warnings]) > 1:
    +                by_line = sorted(groups[min_warnings],
    +                                 key=lambda group: min(warning[0].lineno for warning in group))
    +                warnings = itertools.chain(*by_line[1:])
    +            else:
    +                warnings = groups[min_warnings][0]
    +            for args in warnings:
    +                self._raise_name_warning(*args)
     
         @check_messages('blacklisted-name', 'invalid-name')
         def visit_class(self, node):
             self._check_name('class', node.name, node)
    -        for attr, anodes in node.instance_attrs.iteritems():
    +        for attr, anodes in six.iteritems(node.instance_attrs):
                 if not list(node.instance_attr_ancestors(attr)):
                     self._check_name('attr', attr, anodes[0])
     
    @@ -925,10 +990,15 @@ def visit_class(self, node):
         def visit_function(self, node):
             # Do not emit any warnings if the method is just an implementation
             # of a base class method.
    -        if node.is_method() and overrides_a_method(node.parent.frame(), node.name):
    -            return
    +        confidence = HIGH
    +        if node.is_method():
    +            if overrides_a_method(node.parent.frame(), node.name):
    +                return
    +            confidence = (INFERENCE if has_known_bases(node.parent.frame())
    +                          else INFERENCE_FAILURE)
    +
             self._check_name(_determine_function_name_type(node),
    -                         node.name, node)
    +                         node.name, node, confidence)
             # Check argument names
             args = node.args.args
             if args is not None:
    @@ -951,13 +1021,17 @@ def visit_assname(self, node):
                     if isinstance(safe_infer(ass_type.value), astroid.Class):
                         self._check_name('class', node.name, node)
                     else:
    -                    self._check_name('const', node.name, node)
    +                    if not _redefines_import(node):
    +                        # Don't emit if the name redefines an import
    +                        # in an ImportError except handler.
    +                        self._check_name('const', node.name, node)
                 elif isinstance(ass_type, astroid.ExceptHandler):
                     self._check_name('variable', node.name, node)
             elif isinstance(frame, astroid.Function):
                 # global introduced variable aren't in the function locals
                 if node.name in frame and node.name not in frame.argnames():
    -                self._check_name('variable', node.name, node)
    +                if not _redefines_import(node):
    +                    self._check_name('variable', node.name, node)
             elif isinstance(frame, astroid.Class):
                 if not list(frame.local_attr_ancestors(node.name)):
                     self._check_name('class_attribute', node.name, node)
    @@ -973,12 +1047,16 @@ def _recursive_check_names(self, args, node):
         def _find_name_group(self, node_type):
             return self._name_group.get(node_type, node_type)
     
    -    def _is_multi_naming_match(self, match):
    -        return (match is not None and
    -                match.lastgroup is not None and
    -                match.lastgroup not in EXEMPT_NAME_CATEGORIES)
    +    def _raise_name_warning(self, node, node_type, name, confidence):
    +        type_label = _NAME_TYPES[node_type][1]
    +        hint = ''
    +        if self.config.include_naming_hint:
    +            hint = ' (hint: %s)' % (getattr(self.config, node_type + '_name_hint'))
    +        self.add_message('invalid-name', node=node, args=(type_label, name, hint),
    +                         confidence=confidence)
    +        self.stats['badname_' + node_type] += 1
     
    -    def _check_name(self, node_type, name, node):
    +    def _check_name(self, node_type, name, node, confidence=HIGH):
             """check for a name using the type's regexp"""
             if is_inside_except(node):
                 clobbering, _ = clobber_in_except(node)
    @@ -993,48 +1071,42 @@ def _check_name(self, node_type, name, node):
             regexp = getattr(self.config, node_type + '_rgx')
             match = regexp.match(name)
     
    -        if self._is_multi_naming_match(match):
    +        if _is_multi_naming_match(match, node_type, confidence):
                 name_group = self._find_name_group(node_type)
    -            if name_group not in self._name_category:
    -                self._name_category[name_group] = match.lastgroup
    -            elif self._name_category[name_group] != match.lastgroup:
    -                match = None
    +            bad_name_group = self._bad_names.setdefault(name_group, {})
    +            warnings = bad_name_group.setdefault(match.lastgroup, [])
    +            warnings.append((node, node_type, name, confidence))
     
             if match is None:
    -            type_label = _NAME_TYPES[node_type][1]
    -            hint = ''
    -            if self.config.include_naming_hint:
    -                hint = ' (hint: %s)' % (getattr(self.config, node_type + '_name_hint'))
    -            self.add_message('invalid-name', node=node, args=(type_label, name, hint))
    -            self.stats['badname_' + node_type] += 1
    +            self._raise_name_warning(node, node_type, name, confidence)
     
     
     class DocStringChecker(_BasicChecker):
         msgs = {
    -    'C0111': ('Missing %s docstring', # W0131
    -              'missing-docstring',
    -              'Used when a module, function, class or method has no docstring.\
    -              Some special methods like __init__ doesn\'t necessary require a \
    -              docstring.'),
    -    'C0112': ('Empty %s docstring', # W0132
    -              'empty-docstring',
    -              'Used when a module, function, class or method has an empty \
    -              docstring (it would be too easy ;).'),
    -    }
    +        'C0111': ('Missing %s docstring', # W0131
    +                  'missing-docstring',
    +                  'Used when a module, function, class or method has no docstring.'
    +                  'Some special methods like __init__ doesn\'t necessary require a '
    +                  'docstring.'),
    +        'C0112': ('Empty %s docstring', # W0132
    +                  'empty-docstring',
    +                  'Used when a module, function, class or method has an empty '
    +                  'docstring (it would be too easy ;).'),
    +        }
         options = (('no-docstring-rgx',
                     {'default' : NO_REQUIRED_DOC_RGX,
                      'type' : 'regexp', 'metavar' : '',
                      'help' : 'Regular expression which should only match '
                               'function or class names that do not require a '
                               'docstring.'}
    -                ),
    +               ),
                    ('docstring-min-length',
                     {'default' : -1,
                      'type' : 'int', 'metavar' : '',
                      'help': ('Minimum line length for functions/classes that'
                               ' require docstrings, shorter ones are exempt.')}
    -                ),
    -               )
    +               ),
    +              )
     
     
         def open(self):
    @@ -1050,12 +1122,15 @@ def visit_module(self, node):
         def visit_class(self, node):
             if self.config.no_docstring_rgx.match(node.name) is None:
                 self._check_docstring('class', node)
    +
         @check_messages('missing-docstring', 'empty-docstring')
         def visit_function(self, node):
             if self.config.no_docstring_rgx.match(node.name) is None:
                 ftype = node.is_method() and 'method' or 'function'
                 if isinstance(node.parent.frame(), astroid.Class):
                     overridden = False
    +                confidence = (INFERENCE if has_known_bases(node.parent.frame())
    +                              else INFERENCE_FAILURE)
                     # check if node is from a method overridden by its ancestor
                     for ancestor in node.parent.frame().ancestors():
                         if node.name in ancestor and \
    @@ -1063,11 +1138,13 @@ def visit_function(self, node):
                             overridden = True
                             break
                     self._check_docstring(ftype, node,
    -                                      report_missing=not overridden)
    +                                      report_missing=not overridden,
    +                                      confidence=confidence)
                 else:
                     self._check_docstring(ftype, node)
     
    -    def _check_docstring(self, node_type, node, report_missing=True):
    +    def _check_docstring(self, node_type, node, report_missing=True,
    +                         confidence=HIGH):
             """check the node has a non empty docstring"""
             docstring = node.doc
             if docstring is None:
    @@ -1077,15 +1154,33 @@ def _check_docstring(self, node_type, node, report_missing=True):
                     lines = node.body[-1].lineno - node.body[0].lineno + 1
                 else:
                     lines = 0
    +
    +            if node_type == 'module' and not lines:
    +                # If the module has no body, there's no reason
    +                # to require a docstring.
    +                return
                 max_lines = self.config.docstring_min_length
     
                 if node_type != 'module' and max_lines > -1 and lines < max_lines:
                     return
                 self.stats['undocumented_'+node_type] += 1
    -            self.add_message('missing-docstring', node=node, args=(node_type,))
    +            if (node.body and isinstance(node.body[0], astroid.Discard) and
    +                    isinstance(node.body[0].value, astroid.CallFunc)):
    +                # Most likely a string with a format call. Let's see.
    +                func = safe_infer(node.body[0].value.func)
    +                if (isinstance(func, astroid.BoundMethod)
    +                        and isinstance(func.bound, astroid.Instance)):
    +                    # Strings in Python 3, others in Python 2.
    +                    if PY3K and func.bound.name == 'str':
    +                        return
    +                    elif func.bound.name in ('str', 'unicode', 'bytes'):
    +                        return
    +            self.add_message('missing-docstring', node=node, args=(node_type,),
    +                             confidence=confidence)
             elif not docstring.strip():
                 self.stats['undocumented_'+node_type] += 1
    -            self.add_message('empty-docstring', node=node, args=(node_type,))
    +            self.add_message('empty-docstring', node=node, args=(node_type,),
    +                             confidence=confidence)
     
     
     class PassChecker(_BasicChecker):
    @@ -1094,7 +1189,7 @@ class PassChecker(_BasicChecker):
                           'unnecessary-pass',
                           'Used when a "pass" statement that can be avoided is '
                           'encountered.'),
    -            }
    +           }
         @check_messages('unnecessary-pass')
         def visit_pass(self, node):
             if len(node.parent.child_sequence(node)) > 1:
    @@ -1114,7 +1209,7 @@ class LambdaForComprehensionChecker(_BasicChecker):
                           '"filter". It could be clearer as a list '
                           'comprehension or generator expression.',
                           {'maxversion': (3, 0)}),
    -            }
    +           }
     
         @check_messages('deprecated-lambda')
         def visit_callfunc(self, node):
    @@ -1127,7 +1222,7 @@ def visit_callfunc(self, node):
                 return
             infered = safe_infer(node.func)
             if (is_builtin_object(infered)
    -            and infered.name in ['map', 'filter']):
    +                and infered.name in ['map', 'filter']):
                 self.add_message('deprecated-lambda', node=node)
     
     
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py b/pymode/libs/pylint/checkers/classes.py
    similarity index 65%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py
    rename to pymode/libs/pylint/checkers/classes.py
    index f5e2783f..87e3bcfe 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/classes.py
    +++ b/pymode/libs/pylint/checkers/classes.py
    @@ -18,15 +18,20 @@
     from __future__ import generators
     
     import sys
    +from collections import defaultdict
     
     import astroid
    -from astroid import YES, Instance, are_exclusive, AssAttr
    -from astroid.bases import Generator
    +from astroid import YES, Instance, are_exclusive, AssAttr, Class
    +from astroid.bases import Generator, BUILTINS
    +from astroid.inference import InferenceContext
     
     from pylint.interfaces import IAstroidChecker
     from pylint.checkers import BaseChecker
    -from pylint.checkers.utils import (PYMETHODS, overrides_a_method,
    -    check_messages, is_attr_private, is_attr_protected, node_frame_class)
    +from pylint.checkers.utils import (
    +    PYMETHODS, overrides_a_method, check_messages, is_attr_private,
    +    is_attr_protected, node_frame_class, safe_infer, is_builtin_object,
    +    decorated_with_property, unimplemented_abstract_methods)
    +import six
     
     if sys.version_info >= (3, 0):
         NEXT_METHOD = '__next__'
    @@ -34,6 +39,32 @@
         NEXT_METHOD = 'next'
     ITER_METHODS = ('__iter__', '__getitem__')
     
    +def _called_in_methods(func, klass, methods):
    +    """ Check if the func was called in any of the given methods,
    +    belonging to the *klass*. Returns True if so, False otherwise.
    +    """
    +    if not isinstance(func, astroid.Function):
    +        return False
    +    for method in methods:
    +        try:
    +            infered = klass.getattr(method)
    +        except astroid.NotFoundError:
    +            continue
    +        for infer_method in infered:
    +            for callfunc in infer_method.nodes_of_class(astroid.CallFunc):
    +                try:
    +                    bound = next(callfunc.func.infer())
    +                except (astroid.InferenceError, StopIteration):
    +                    continue
    +                if not isinstance(bound, astroid.BoundMethod):
    +                    continue
    +                func_obj = bound._proxied
    +                if isinstance(func_obj, astroid.UnboundMethod):
    +                    func_obj = func_obj._proxied
    +                if func_obj.name == func.name:
    +                    return True
    +    return False
    +
     def class_is_abstract(node):
         """return true if the given class node should be considered as an abstract
         class
    @@ -44,13 +75,41 @@ def class_is_abstract(node):
                     return True
         return False
     
    +def _is_attribute_property(name, klass):
    +    """ Check if the given attribute *name* is a property
    +    in the given *klass*.
    +
    +    It will look for `property` calls or for functions
    +    with the given name, decorated by `property` or `property`
    +    subclasses.
    +    Returns ``True`` if the name is a property in the given klass,
    +    ``False`` otherwise.
    +    """
    +
    +    try:
    +        attributes = klass.getattr(name)
    +    except astroid.NotFoundError:
    +        return False
    +    property_name = "{0}.property".format(BUILTINS)
    +    for attr in attributes:
    +        try:
    +            infered = next(attr.infer())
    +        except astroid.InferenceError:
    +            continue
    +        if (isinstance(infered, astroid.Function) and
    +                decorated_with_property(infered)):
    +            return True
    +        if infered.pytype() == property_name:
    +            return True
    +    return False
    +
     
     MSGS = {
         'F0202': ('Unable to check methods signature (%s / %s)',
                   'method-check-failed',
    -              'Used when PyLint has been unable to check methods signature \
    -              compatibility for an unexpected reason. Please report this kind \
    -              if you don\'t make sense of it.'),
    +              'Used when Pylint has been unable to check methods signature '
    +              'compatibility for an unexpected reason. Please report this kind '
    +              'if you don\'t make sense of it.'),
     
         'E0202': ('An attribute defined in %s line %s hides this method',
                   'method-hidden',
    @@ -59,35 +118,35 @@ def class_is_abstract(node):
                   'client code.'),
         'E0203': ('Access to member %r before its definition line %s',
                   'access-member-before-definition',
    -              'Used when an instance member is accessed before it\'s actually\
    -              assigned.'),
    +              'Used when an instance member is accessed before it\'s actually '
    +              'assigned.'),
         'W0201': ('Attribute %r defined outside __init__',
                   'attribute-defined-outside-init',
    -              'Used when an instance attribute is defined outside the __init__\
    -              method.'),
    +              'Used when an instance attribute is defined outside the __init__ '
    +              'method.'),
     
         'W0212': ('Access to a protected member %s of a client class', # E0214
                   'protected-access',
    -              'Used when a protected member (i.e. class member with a name \
    -              beginning with an underscore) is access outside the class or a \
    -              descendant of the class where it\'s defined.'),
    +              'Used when a protected member (i.e. class member with a name '
    +              'beginning with an underscore) is access outside the class or a '
    +              'descendant of the class where it\'s defined.'),
     
         'E0211': ('Method has no argument',
                   'no-method-argument',
    -              'Used when a method which should have the bound instance as \
    -              first argument has no argument defined.'),
    +              'Used when a method which should have the bound instance as '
    +              'first argument has no argument defined.'),
         'E0213': ('Method should have "self" as first argument',
                   'no-self-argument',
    -              'Used when a method has an attribute different the "self" as\
    -              first argument. This is considered as an error since this is\
    -              a so common convention that you shouldn\'t break it!'),
    -    'C0202': ('Class method %s should have %s as first argument', # E0212
    +              'Used when a method has an attribute different the "self" as '
    +              'first argument. This is considered as an error since this is '
    +              'a so common convention that you shouldn\'t break it!'),
    +    'C0202': ('Class method %s should have %s as first argument',
                   'bad-classmethod-argument',
                   'Used when a class method has a first argument named differently '
                   'than the value specified in valid-classmethod-first-arg option '
                   '(default to "cls"), recommended to easily differentiate them '
                   'from regular instance methods.'),
    -    'C0203': ('Metaclass method %s should have %s as first argument', # E0214
    +    'C0203': ('Metaclass method %s should have %s as first argument',
                   'bad-mcs-method-argument',
                   'Used when a metaclass method has a first agument named '
                   'differently than the value specified in valid-classmethod-first'
    @@ -105,69 +164,77 @@ def class_is_abstract(node):
                   'Used when a static method has "self" or a value specified in '
                   'valid-classmethod-first-arg option or '
                   'valid-metaclass-classmethod-first-arg option as first argument.'
    -              ),
    +             ),
         'R0201': ('Method could be a function',
                   'no-self-use',
    -              'Used when a method doesn\'t use its bound instance, and so could\
    -              be written as a function.'
    -              ),
    +              'Used when a method doesn\'t use its bound instance, and so could '
    +              'be written as a function.'
    +             ),
     
         'E0221': ('Interface resolved to %s is not a class',
                   'interface-is-not-class',
    -              'Used when a class claims to implement an interface which is not \
    -              a class.'),
    +              'Used when a class claims to implement an interface which is not '
    +              'a class.'),
         'E0222': ('Missing method %r from %s interface',
                   'missing-interface-method',
    -              'Used when a method declared in an interface is missing from a \
    -              class implementing this interface'),
    -    'W0221': ('Arguments number differs from %s method',
    +              'Used when a method declared in an interface is missing from a '
    +              'class implementing this interface'),
    +    'W0221': ('Arguments number differs from %s %r method',
                   'arguments-differ',
    -              'Used when a method has a different number of arguments than in \
    -              the implemented interface or in an overridden method.'),
    -    'W0222': ('Signature differs from %s method',
    +              'Used when a method has a different number of arguments than in '
    +              'the implemented interface or in an overridden method.'),
    +    'W0222': ('Signature differs from %s %r method',
                   'signature-differs',
    -              'Used when a method signature is different than in the \
    -              implemented interface or in an overridden method.'),
    +              'Used when a method signature is different than in the '
    +              'implemented interface or in an overridden method.'),
         'W0223': ('Method %r is abstract in class %r but is not overridden',
                   'abstract-method',
    -              'Used when an abstract method (i.e. raise NotImplementedError) is \
    -              not overridden in concrete class.'
    -              ),
    -    'F0220': ('failed to resolve interfaces implemented by %s (%s)', # W0224
    +              'Used when an abstract method (i.e. raise NotImplementedError) is '
    +              'not overridden in concrete class.'
    +             ),
    +    'F0220': ('failed to resolve interfaces implemented by %s (%s)',
                   'unresolved-interface',
    -              'Used when a PyLint as failed to find interfaces implemented by \
    -               a class'),
    +              'Used when a Pylint as failed to find interfaces implemented by '
    +              ' a class'),
     
     
         'W0231': ('__init__ method from base class %r is not called',
                   'super-init-not-called',
    -              'Used when an ancestor class method has an __init__ method \
    -              which is not called by a derived class.'),
    +              'Used when an ancestor class method has an __init__ method '
    +              'which is not called by a derived class.'),
         'W0232': ('Class has no __init__ method',
                   'no-init',
    -              'Used when a class has no __init__ method, neither its parent \
    -              classes.'),
    +              'Used when a class has no __init__ method, neither its parent '
    +              'classes.'),
         'W0233': ('__init__ method from a non direct base class %r is called',
                   'non-parent-init-called',
    -              'Used when an __init__ method is called on a class which is not \
    -              in the direct ancestors for the analysed class.'),
    +              'Used when an __init__ method is called on a class which is not '
    +              'in the direct ancestors for the analysed class.'),
         'W0234': ('__iter__ returns non-iterator',
                   'non-iterator-returned',
    -              'Used when an __iter__ method returns something which is not an \
    -               iterable (i.e. has no `%s` method)' % NEXT_METHOD),
    +              'Used when an __iter__ method returns something which is not an '
    +               'iterable (i.e. has no `%s` method)' % NEXT_METHOD),
         'E0235': ('__exit__ must accept 3 arguments: type, value, traceback',
                   'bad-context-manager',
    -              'Used when the __exit__ special method, belonging to a \
    -               context manager, does not accept 3 arguments \
    -               (type, value, traceback).'),
    +              'Used when the __exit__ special method, belonging to a '
    +              'context manager, does not accept 3 arguments '
    +              '(type, value, traceback).'),
         'E0236': ('Invalid object %r in __slots__, must contain '
                   'only non empty strings',
                   'invalid-slots-object',
                   'Used when an invalid (non-string) object occurs in __slots__.'),
    +    'E0237': ('Assigning to attribute %r not defined in class slots',
    +              'assigning-non-slot',
    +              'Used when assigning to an attribute not defined '
    +              'in the class slots.'),
         'E0238': ('Invalid __slots__ object',
                   'invalid-slots',
                   'Used when an invalid __slots__ is found in class. '
    -              'Only a string, an iterable or a sequence is permitted.')
    +              'Only a string, an iterable or a sequence is permitted.'),
    +    'E0239': ('Inheriting %r, which is not a class.',
    +              'inherit-non-class',
    +              'Used when a class inherits from something which is not a '
    +              'class.'),
     
     
         }
    @@ -193,45 +260,52 @@ class ClassChecker(BaseChecker):
         # configuration options
         options = (('ignore-iface-methods',
                     {'default' : (#zope interface
    -        'isImplementedBy', 'deferred', 'extends', 'names',
    -        'namesAndDescriptions', 'queryDescriptionFor', 'getBases',
    -        'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue',
    -        'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue',
    -        'isImplementedByInstancesOf',
    -        # twisted
    -        'adaptWith',
    -        # logilab.common interface
    -        'is_implemented_by'),
    +                    'isImplementedBy', 'deferred', 'extends', 'names',
    +                    'namesAndDescriptions', 'queryDescriptionFor', 'getBases',
    +                    'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue',
    +                    'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue',
    +                    'isImplementedByInstancesOf',
    +                    # twisted
    +                    'adaptWith',
    +                    # logilab.common interface
    +                    'is_implemented_by'),
                      'type' : 'csv',
                      'metavar' : '',
                      'help' : 'List of interface methods to ignore, \
     separated by a comma. This is used for instance to not check methods defines \
     in Zope\'s Interface base class.'}
    -                ),
    -
    +               ),
                    ('defining-attr-methods',
                     {'default' : ('__init__', '__new__', 'setUp'),
                      'type' : 'csv',
                      'metavar' : '',
                      'help' : 'List of method names used to declare (i.e. assign) \
     instance attributes.'}
    -                ),
    +               ),
                    ('valid-classmethod-first-arg',
                     {'default' : ('cls',),
                      'type' : 'csv',
                      'metavar' : '',
                      'help' : 'List of valid names for the first argument in \
     a class method.'}
    -                ),
    +               ),
                    ('valid-metaclass-classmethod-first-arg',
                     {'default' : ('mcs',),
                      'type' : 'csv',
                      'metavar' : '',
                      'help' : 'List of valid names for the first argument in \
     a metaclass class method.'}
    -                ),
    -
    -               )
    +               ),
    +               ('exclude-protected',
    +                {
    +                    'default': (
    +                        # namedtuple public API.
    +                        '_asdict', '_fields', '_replace', '_source', '_make'),
    +                    'type': 'csv',
    +                    'metavar': '',
    +                    'help': ('List of member names, which should be excluded '
    +                             'from the protected access warning.')}
    +               ))
     
         def __init__(self, linter=None):
             BaseChecker.__init__(self, linter)
    @@ -242,7 +316,7 @@ def __init__(self, linter=None):
         def visit_class(self, node):
             """init visit variable _accessed and check interfaces
             """
    -        self._accessed.append({})
    +        self._accessed.append(defaultdict(list))
             self._check_bases_classes(node)
             self._check_interfaces(node)
             # if not an interface, exception, metaclass
    @@ -252,8 +326,27 @@ def visit_class(self, node):
                 except astroid.NotFoundError:
                     self.add_message('no-init', args=node, node=node)
             self._check_slots(node)
    +        self._check_proper_bases(node)
    +
    +    @check_messages('inherit-non-class')
    +    def _check_proper_bases(self, node):
    +        """
    +        Detect that a class inherits something which is not
    +        a class or a type.
    +        """
    +        for base in node.bases:
    +            ancestor = safe_infer(base)
    +            if ancestor in (YES, None):
    +                continue
    +            if (isinstance(ancestor, astroid.Instance) and
    +                    ancestor.is_subtype_of('%s.type' % (BUILTINS,))):
    +                continue
    +            if not isinstance(ancestor, astroid.Class):
    +                self.add_message('inherit-non-class',
    +                                 args=base.as_string(), node=node)
     
    -    @check_messages('access-member-before-definition', 'attribute-defined-outside-init')
    +    @check_messages('access-member-before-definition',
    +                    'attribute-defined-outside-init')
         def leave_class(self, cnode):
             """close a class node:
             check that instance attributes are defined in __init__ and check
    @@ -267,33 +360,45 @@ def leave_class(self, cnode):
             if not self.linter.is_message_enabled('attribute-defined-outside-init'):
                 return
             defining_methods = self.config.defining_attr_methods
    -        for attr, nodes in cnode.instance_attrs.iteritems():
    +        current_module = cnode.root()
    +        for attr, nodes in six.iteritems(cnode.instance_attrs):
    +            # skip nodes which are not in the current module and it may screw up
    +            # the output, while it's not worth it
                 nodes = [n for n in nodes if not
    -                    isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))]
    +                     isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
    +                     and n.root() is current_module]
                 if not nodes:
                     continue # error detected by typechecking
    -            attr_defined = False
                 # check if any method attr is defined in is a defining method
    -            for node in nodes:
    -                if node.frame().name in defining_methods:
    -                    attr_defined = True
    -            if not attr_defined:
    -                # check attribute is defined in a parent's __init__
    -                for parent in cnode.instance_attr_ancestors(attr):
    -                    attr_defined = False
    -                    # check if any parent method attr is defined in is a defining method
    -                    for node in parent.instance_attrs[attr]:
    -                        if node.frame().name in defining_methods:
    -                            attr_defined = True
    -                    if attr_defined:
    -                        # we're done :)
    -                        break
    -                else:
    -                    # check attribute is defined as a class attribute
    -                    try:
    -                        cnode.local_attr(attr)
    -                    except astroid.NotFoundError:
    -                        self.add_message('attribute-defined-outside-init', args=attr, node=node)
    +            if any(node.frame().name in defining_methods
    +                   for node in nodes):
    +                continue
    +
    +            # check attribute is defined in a parent's __init__
    +            for parent in cnode.instance_attr_ancestors(attr):
    +                attr_defined = False
    +                # check if any parent method attr is defined in is a defining method
    +                for node in parent.instance_attrs[attr]:
    +                    if node.frame().name in defining_methods:
    +                        attr_defined = True
    +                if attr_defined:
    +                    # we're done :)
    +                    break
    +            else:
    +                # check attribute is defined as a class attribute
    +                try:
    +                    cnode.local_attr(attr)
    +                except astroid.NotFoundError:
    +                    for node in nodes:
    +                        if node.frame().name not in defining_methods:
    +                            # If the attribute was set by a callfunc in any
    +                            # of the defining methods, then don't emit
    +                            # the warning.
    +                            if _called_in_methods(node.frame(), cnode,
    +                                                  defining_methods):
    +                                continue
    +                            self.add_message('attribute-defined-outside-init',
    +                                             args=attr, node=node)
     
         def visit_function(self, node):
             """check method arguments, overriding"""
    @@ -334,8 +439,14 @@ def visit_function(self, node):
             # check if the method is hidden by an attribute
             try:
                 overridden = klass.instance_attr(node.name)[0] # XXX
    -            args = (overridden.root().name, overridden.fromlineno)
    -            self.add_message('method-hidden', args=args, node=node)
    +            overridden_frame = overridden.frame()
    +            if (isinstance(overridden_frame, astroid.Function)
    +                    and overridden_frame.type == 'method'):
    +                overridden_frame = overridden_frame.parent.frame()
    +            if (isinstance(overridden_frame, Class)
    +                    and klass.is_subtype_of(overridden_frame.qname())):
    +                args = (overridden.root().name, overridden.fromlineno)
    +                self.add_message('method-hidden', args=args, node=node)
             except astroid.NotFoundError:
                 pass
     
    @@ -385,7 +496,7 @@ def _check_slots_elt(self, elt):
                 if infered is YES:
                     continue
                 if (not isinstance(infered, astroid.Const) or
    -                not isinstance(infered.value, str)):
    +                    not isinstance(infered.value, six.string_types)):
                     self.add_message('invalid-slots-object',
                                      args=infered.as_string(),
                                      node=elt)
    @@ -403,7 +514,7 @@ def _check_iter(self, node):
     
             for infered_node in infered:
                 if (infered_node is YES
    -                or isinstance(infered_node, Generator)):
    +                    or isinstance(infered_node, Generator)):
                     continue
                 if isinstance(infered_node, astroid.Instance):
                     try:
    @@ -436,10 +547,10 @@ def leave_function(self, node):
                     return
                 class_node = node.parent.frame()
                 if (self._meth_could_be_func and node.type == 'method'
    -                and not node.name in PYMETHODS
    -                and not (node.is_abstract() or
    -                         overrides_a_method(class_node, node.name))
    -                and class_node.type != 'interface'):
    +                    and not node.name in PYMETHODS
    +                    and not (node.is_abstract() or
    +                             overrides_a_method(class_node, node.name))
    +                    and class_node.type != 'interface'):
                     self.add_message('no-self-use', node=node)
     
         def visit_getattr(self, node):
    @@ -451,7 +562,7 @@ class member from outside its class (but ignore __special__
             attrname = node.attrname
             # Check self
             if self.is_first_attr(node):
    -            self._accessed[-1].setdefault(attrname, []).append(node)
    +            self._accessed[-1][attrname].append(node)
                 return
             if not self.linter.is_message_enabled('protected-access'):
                 return
    @@ -460,7 +571,39 @@ class member from outside its class (but ignore __special__
     
         def visit_assattr(self, node):
             if isinstance(node.ass_type(), astroid.AugAssign) and self.is_first_attr(node):
    -            self._accessed[-1].setdefault(node.attrname, []).append(node)
    +            self._accessed[-1][node.attrname].append(node)
    +        self._check_in_slots(node)
    +
    +    def _check_in_slots(self, node):
    +        """ Check that the given assattr node
    +        is defined in the class slots.
    +        """
    +        infered = safe_infer(node.expr)
    +        if infered and isinstance(infered, Instance):
    +            klass = infered._proxied
    +            if '__slots__' not in klass.locals or not klass.newstyle:
    +                return
    +
    +            slots = klass.slots()
    +            if slots is None:
    +                return
    +            # If any ancestor doesn't use slots, the slots
    +            # defined for this class are superfluous.
    +            if any('__slots__' not in ancestor.locals and
    +                   ancestor.name != 'object'
    +                   for ancestor in klass.ancestors()):
    +                return
    +
    +            if not any(slot.value == node.attrname for slot in slots):
    +                # If we have a '__dict__' in slots, then
    +                # assigning any name is valid.
    +                if not any(slot.value == '__dict__' for slot in slots):
    +                    if _is_attribute_property(node.attrname, klass):
    +                        # Properties circumvent the slots mechanism,
    +                        # so we should not emit a warning for them.
    +                        return
    +                    self.add_message('assigning-non-slot',
    +                                     args=(node.attrname, ), node=node)
     
         @check_messages('protected-access')
         def visit_assign(self, assign_node):
    @@ -485,7 +628,8 @@ def _check_protected_attribute_access(self, node):
             '''
             attrname = node.attrname
     
    -        if is_attr_protected(attrname):
    +        if (is_attr_protected(attrname) and
    +                attrname not in self.config.exclude_protected):
     
                 klass = node_frame_class(node)
     
    @@ -508,6 +652,23 @@ def _check_protected_attribute_access(self, node):
                 # We are in a class, one remaining valid cases, Klass._attr inside
                 # Klass
                 if not (callee == klass.name or callee in klass.basenames):
    +                # Detect property assignments in the body of the class.
    +                # This is acceptable:
    +                #
    +                # class A:
    +                #     b = property(lambda: self._b)
    +
    +                stmt = node.parent.statement()
    +                try:
    +                    if (isinstance(stmt, astroid.Assign) and
    +                            (stmt in klass.body or klass.parent_of(stmt)) and
    +                            isinstance(stmt.value, astroid.CallFunc) and
    +                            isinstance(stmt.value.func, astroid.Name) and
    +                            stmt.value.func.name == 'property' and
    +                            is_builtin_object(next(stmt.value.func.infer(), None))):
    +                        return
    +                except astroid.InferenceError:
    +                    pass
                     self.add_message('protected-access', node=node, args=attrname)
     
         def visit_name(self, node):
    @@ -521,7 +682,7 @@ def visit_name(self, node):
         def _check_accessed_members(self, node, accessed):
             """check that accessed members are defined"""
             # XXX refactor, probably much simpler now that E0201 is in type checker
    -        for attr, nodes in accessed.iteritems():
    +        for attr, nodes in six.iteritems(accessed):
                 # deactivate "except doesn't do anything", that's expected
                 # pylint: disable=W0704
                 try:
    @@ -533,7 +694,7 @@ def _check_accessed_members(self, node, accessed):
                     pass
                 # is it an instance attribute of a parent class ?
                 try:
    -                node.instance_attr_ancestors(attr).next()
    +                next(node.instance_attr_ancestors(attr))
                     # yes, stop here
                     continue
                 except StopIteration:
    @@ -565,7 +726,8 @@ def _check_accessed_members(self, node, accessed):
                         lno = defstmt.fromlineno
                         for _node in nodes:
                             if _node.frame() is frame and _node.fromlineno < lno \
    -                           and not are_exclusive(_node.statement(), defstmt, ('AttributeError', 'Exception', 'BaseException')):
    +                           and not are_exclusive(_node.statement(), defstmt,
    +                                                 ('AttributeError', 'Exception', 'BaseException')):
                                 self.add_message('access-member-before-definition',
                                                  node=_node, args=(attr, lno))
     
    @@ -588,8 +750,8 @@ def _check_first_arg_for_type(self, node, metaclass=0):
             # static method
             if node.type == 'staticmethod':
                 if (first_arg == 'self' or
    -                first_arg in self.config.valid_classmethod_first_arg or
    -                first_arg in self.config.valid_metaclass_classmethod_first_arg):
    +                    first_arg in self.config.valid_classmethod_first_arg or
    +                    first_arg in self.config.valid_metaclass_classmethod_first_arg):
                     self.add_message('bad-staticmethod-argument', args=first, node=node)
                     return
                 self._first_attrs[-1] = None
    @@ -600,20 +762,25 @@ def _check_first_arg_for_type(self, node, metaclass=0):
             elif metaclass:
                 # metaclass __new__ or classmethod
                 if node.type == 'classmethod':
    -                self._check_first_arg_config(first,
    +                self._check_first_arg_config(
    +                    first,
                         self.config.valid_metaclass_classmethod_first_arg, node,
                         'bad-mcs-classmethod-argument', node.name)
                 # metaclass regular method
                 else:
    -                self._check_first_arg_config(first,
    -                    self.config.valid_classmethod_first_arg, node, 'bad-mcs-method-argument',
    +                self._check_first_arg_config(
    +                    first,
    +                    self.config.valid_classmethod_first_arg, node,
    +                    'bad-mcs-method-argument',
                         node.name)
             # regular class
             else:
                 # class method
                 if node.type == 'classmethod':
    -                self._check_first_arg_config(first,
    -                    self.config.valid_classmethod_first_arg, node, 'bad-classmethod-argument',
    +                self._check_first_arg_config(
    +                    first,
    +                    self.config.valid_classmethod_first_arg, node,
    +                    'bad-classmethod-argument',
                         node.name)
                 # regular method without self as argument
                 elif first != 'self':
    @@ -625,32 +792,36 @@ def _check_first_arg_config(self, first, config, node, message,
                 if len(config) == 1:
                     valid = repr(config[0])
                 else:
    -                valid = ', '.join(
    -                  repr(v)
    -                  for v in config[:-1])
    -                valid = '%s or %r' % (
    -                    valid, config[-1])
    +                valid = ', '.join(repr(v) for v in config[:-1])
    +                valid = '%s or %r' % (valid, config[-1])
                 self.add_message(message, args=(method_name, valid), node=node)
     
         def _check_bases_classes(self, node):
             """check that the given class node implements abstract methods from
             base classes
             """
    +        def is_abstract(method):
    +            return method.is_abstract(pass_is_abstract=False)
    +
             # check if this class abstract
             if class_is_abstract(node):
                 return
    -        for method in node.methods():
    +
    +        methods = sorted(
    +            unimplemented_abstract_methods(node, is_abstract).items(),
    +            key=lambda item: item[0],
    +        )
    +        for name, method in methods:
                 owner = method.parent.frame()
                 if owner is node:
                     continue
                 # owner is not this class, it must be a parent class
                 # check that the ancestor's method is not abstract
    -            if method.name in node.locals:
    +            if name in node.locals:
                     # it is redefined as an attribute or with a descriptor
                     continue
    -            if method.is_abstract(pass_is_abstract=False):
    -                self.add_message('abstract-method', node=node,
    -                                 args=(method.name, owner.name))
    +            self.add_message('abstract-method', node=node,
    +                             args=(name, owner.name))
     
         def _check_interfaces(self, node):
             """check that the given class node really implements declared
    @@ -678,7 +849,8 @@ def iface_handler(obj):
                         try:
                             method = node_method(node, name)
                         except astroid.NotFoundError:
    -                        self.add_message('missing-interface-method', args=(name, iface.name),
    +                        self.add_message('missing-interface-method',
    +                                         args=(name, iface.name),
                                              node=node)
                             continue
                         # ignore inherited methods
    @@ -686,7 +858,7 @@ def iface_handler(obj):
                             continue
                         # check signature
                         self._check_signature(method, imethod,
    -                                         '%s interface' % iface.name)
    +                                          '%s interface' % iface.name)
             except astroid.InferenceError:
                 if e0221_hack[0]:
                     return
    @@ -705,7 +877,7 @@ def _check_init(self, node):
             method
             """
             if (not self.linter.is_message_enabled('super-init-not-called') and
    -            not self.linter.is_message_enabled('non-parent-init-called')):
    +                not self.linter.is_message_enabled('non-parent-init-called')):
                 return
             klass_node = node.parent.frame()
             to_call = _ancestors_to_call(klass_node)
    @@ -717,21 +889,34 @@ def _check_init(self, node):
                     continue
                 # skip the test if using super
                 if isinstance(expr.expr, astroid.CallFunc) and \
    -               isinstance(expr.expr.func, astroid.Name) and \
    +                   isinstance(expr.expr.func, astroid.Name) and \
                    expr.expr.func.name == 'super':
                     return
                 try:
    -                klass = expr.expr.infer().next()
    -                if klass is YES:
    -                    continue
    -                try:
    -                    del not_called_yet[klass]
    -                except KeyError:
    -                    if klass not in to_call:
    -                        self.add_message('non-parent-init-called', node=expr, args=klass.name)
    +                for klass in expr.expr.infer():
    +                    if klass is YES:
    +                        continue
    +                    # The infered klass can be super(), which was
    +                    # assigned to a variable and the `__init__`
    +                    # was called later.
    +                    #
    +                    # base = super()
    +                    # base.__init__(...)
    +
    +                    if (isinstance(klass, astroid.Instance) and
    +                            isinstance(klass._proxied, astroid.Class) and
    +                            is_builtin_object(klass._proxied) and
    +                            klass._proxied.name == 'super'):
    +                        return
    +                    try:
    +                        del not_called_yet[klass]
    +                    except KeyError:
    +                        if klass not in to_call:
    +                            self.add_message('non-parent-init-called',
    +                                             node=expr, args=klass.name)
                 except astroid.InferenceError:
                     continue
    -        for klass, method in not_called_yet.iteritems():
    +        for klass, method in six.iteritems(not_called_yet):
                 if klass.name == 'object' or method.parent.name == 'object':
                     continue
                 self.add_message('super-init-not-called', args=klass.name, node=node)
    @@ -743,7 +928,8 @@ def _check_signature(self, method1, refmethod, class_type):
             """
             if not (isinstance(method1, astroid.Function)
                     and isinstance(refmethod, astroid.Function)):
    -            self.add_message('method-check-failed', args=(method1, refmethod), node=method1)
    +            self.add_message('method-check-failed',
    +                             args=(method1, refmethod), node=method1)
                 return
             # don't care about functions with unknown argument (builtins)
             if method1.args.args is None or refmethod.args.args is None:
    @@ -754,9 +940,13 @@ def _check_signature(self, method1, refmethod, class_type):
             if is_attr_private(method1.name):
                 return
             if len(method1.args.args) != len(refmethod.args.args):
    -            self.add_message('arguments-differ', args=class_type, node=method1)
    +            self.add_message('arguments-differ',
    +                             args=(class_type, method1.name),
    +                             node=method1)
             elif len(method1.args.defaults) < len(refmethod.args.defaults):
    -            self.add_message('signature-differs', args=class_type, node=method1)
    +            self.add_message('signature-differs',
    +                             args=(class_type, method1.name),
    +                             node=method1)
     
         def is_first_attr(self, node):
             """Check that attribute lookup name use first attribute variable name
    @@ -772,7 +962,7 @@ def _ancestors_to_call(klass_node, method='__init__'):
         to_call = {}
         for base_node in klass_node.ancestors(recurs=False):
             try:
    -            to_call[base_node] = base_node.igetattr(method).next()
    +            to_call[base_node] = next(base_node.igetattr(method))
             except astroid.InferenceError:
                 continue
         return to_call
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py b/pymode/libs/pylint/checkers/design_analysis.py
    similarity index 75%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py
    rename to pymode/libs/pylint/checkers/design_analysis.py
    index c9ef4dfa..9ff10bf3 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/design_analysis.py
    +++ b/pymode/libs/pylint/checkers/design_analysis.py
    @@ -15,29 +15,19 @@
     # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
     """check for signs of poor design"""
     
    -from astroid import Function, If, InferenceError
    +import re
    +from collections import defaultdict
    +
    +from astroid import If, InferenceError
     
     from pylint.interfaces import IAstroidChecker
     from pylint.checkers import BaseChecker
     from pylint.checkers.utils import check_messages
     
    -import re
    -
     # regexp for ignored argument name
     IGNORED_ARGUMENT_NAMES = re.compile('_.*')
     
     
    -def class_is_abstract(klass):
    -    """return true if the given class node should be considered as an abstract
    -    class
    -    """
    -    for attr in klass.values():
    -        if isinstance(attr, Function):
    -            if attr.is_abstract(pass_is_abstract=False):
    -                return True
    -    return False
    -
    -
     MSGS = {
         'R0901': ('Too many ancestors (%s/%s)',
                   'too-many-ancestors',
    @@ -74,14 +64,6 @@ def class_is_abstract(klass):
                   'too-many-statements',
                   'Used when a function or method has too many statements. You \
                   should then split it in smaller functions / methods.'),
    -
    -    'R0921': ('Abstract class not referenced',
    -              'abstract-class-not-used',
    -              'Used when an abstract class is not used as ancestor anywhere.'),
    -    'R0922': ('Abstract class is only referenced %s times',
    -              'abstract-class-little-used',
    -              'Used when an abstract class is used less than X times as \
    -              ancestor.'),
         'R0923': ('Interface not implemented',
                   'interface-not-implemented',
                   'Used when an interface class is not implemented anywhere.'),
    @@ -105,68 +87,66 @@ class MisdesignChecker(BaseChecker):
         options = (('max-args',
                     {'default' : 5, 'type' : 'int', 'metavar' : '',
                      'help': 'Maximum number of arguments for function / method'}
    -                ),
    +               ),
                    ('ignored-argument-names',
                     {'default' : IGNORED_ARGUMENT_NAMES,
                      'type' :'regexp', 'metavar' : '',
                      'help' : 'Argument names that match this expression will be '
                               'ignored. Default to name with leading underscore'}
    -                ),
    +               ),
                    ('max-locals',
                     {'default' : 15, 'type' : 'int', 'metavar' : '',
                      'help': 'Maximum number of locals for function / method body'}
    -                ),
    +               ),
                    ('max-returns',
                     {'default' : 6, 'type' : 'int', 'metavar' : '',
                      'help': 'Maximum number of return / yield for function / '
                              'method body'}
    -                ),
    +               ),
                    ('max-branches',
                     {'default' : 12, 'type' : 'int', 'metavar' : '',
                      'help': 'Maximum number of branch for function / method body'}
    -                ),
    +               ),
                    ('max-statements',
                     {'default' : 50, 'type' : 'int', 'metavar' : '',
                      'help': 'Maximum number of statements in function / method '
                              'body'}
    -                ),
    +               ),
                    ('max-parents',
                     {'default' : 7,
                      'type' : 'int',
                      'metavar' : '',
                      'help' : 'Maximum number of parents for a class (see R0901).'}
    -                ),
    +               ),
                    ('max-attributes',
                     {'default' : 7,
                      'type' : 'int',
                      'metavar' : '',
                      'help' : 'Maximum number of attributes for a class \
     (see R0902).'}
    -                ),
    +               ),
                    ('min-public-methods',
                     {'default' : 2,
                      'type' : 'int',
                      'metavar' : '',
                      'help' : 'Minimum number of public methods for a class \
     (see R0903).'}
    -                ),
    +               ),
                    ('max-public-methods',
                     {'default' : 20,
                      'type' : 'int',
                      'metavar' : '',
                      'help' : 'Maximum number of public methods for a class \
     (see R0904).'}
    -                ),
    -               )
    +               ),
    +              )
     
         def __init__(self, linter=None):
             BaseChecker.__init__(self, linter)
             self.stats = None
             self._returns = None
             self._branches = None
    -        self._used_abstracts = None
             self._used_ifaces = None
    -        self._abstracts = None
             self._ifaces = None
             self._stmts = 0
     
    @@ -174,33 +154,22 @@ def open(self):
             """initialize visit variables"""
             self.stats = self.linter.add_stats()
             self._returns = []
    -        self._branches = []
    -        self._used_abstracts = {}
    +        self._branches = defaultdict(int)
             self._used_ifaces = {}
    -        self._abstracts = []
             self._ifaces = []
     
    -    # Check 'R0921', 'R0922', 'R0923'
         def close(self):
    -        """check that abstract/interface classes are used"""
    -        for abstract in self._abstracts:
    -            if not abstract in self._used_abstracts:
    -                self.add_message('abstract-class-not-used', node=abstract)
    -            elif self._used_abstracts[abstract] < 2:
    -                self.add_message('abstract-class-little-used', node=abstract,
    -                                 args=self._used_abstracts[abstract])
    +        """check that interface classes are used"""
             for iface in self._ifaces:
                 if not iface in self._used_ifaces:
                     self.add_message('interface-not-implemented', node=iface)
     
         @check_messages('too-many-ancestors', 'too-many-instance-attributes',
                         'too-few-public-methods', 'too-many-public-methods',
    -                    'abstract-class-not-used', 'abstract-class-little-used',
                         'interface-not-implemented')
         def visit_class(self, node):
             """check size of inheritance hierarchy and number of instance attributes
             """
    -        self._inc_branch()
             # Is the total inheritance hierarchy is 7 or less?
             nb_parents = len(list(node.ancestors()))
             if nb_parents > self.config.max_parents:
    @@ -213,10 +182,8 @@ def visit_class(self, node):
                 self.add_message('too-many-instance-attributes', node=node,
                                  args=(len(node.instance_attrs),
                                        self.config.max_attributes))
    -        # update abstract / interface classes structures
    -        if class_is_abstract(node):
    -            self._abstracts.append(node)
    -        elif node.type == 'interface' and node.name != 'Interface':
    +        # update interface classes structures
    +        if node.type == 'interface' and node.name != 'Interface':
                 self._ifaces.append(node)
                 for parent in node.ancestors(False):
                     if parent.name == 'Interface':
    @@ -228,49 +195,47 @@ def visit_class(self, node):
             except InferenceError:
                 # XXX log ?
                 pass
    -        for parent in node.ancestors():
    -            try:
    -                self._used_abstracts[parent] += 1
    -            except KeyError:
    -                self._used_abstracts[parent] = 1
     
    -    @check_messages('too-many-ancestors', 'too-many-instance-attributes',
    -                    'too-few-public-methods', 'too-many-public-methods',
    -                    'abstract-class-not-used', 'abstract-class-little-used',
    -                    'interface-not-implemented')
    +    @check_messages('too-few-public-methods', 'too-many-public-methods')
         def leave_class(self, node):
             """check number of public methods"""
    -        nb_public_methods = 0
    -        special_methods = set()
    -        for method in node.methods():
    -            if not method.name.startswith('_'):
    -                nb_public_methods += 1
    -            if method.name.startswith("__"):
    -                special_methods.add(method.name)
    -        # Does the class contain less than 20 public methods ?
    -        if nb_public_methods > self.config.max_public_methods:
    +        my_methods = sum(1 for method in node.mymethods()
    +                         if not method.name.startswith('_'))
    +        all_methods = sum(1 for method in node.methods()
    +                          if not method.name.startswith('_'))
    +
    +        # Does the class contain less than n public methods ?
    +        # This checks only the methods defined in the current class,
    +        # since the user might not have control over the classes
    +        # from the ancestors. It avoids some false positives
    +        # for classes such as unittest.TestCase, which provides
    +        # a lot of assert methods. It doesn't make sense to warn
    +        # when the user subclasses TestCase to add his own tests.
    +        if my_methods > self.config.max_public_methods:
                 self.add_message('too-many-public-methods', node=node,
    -                             args=(nb_public_methods,
    +                             args=(my_methods,
                                        self.config.max_public_methods))
             # stop here for exception, metaclass and interface classes
             if node.type != 'class':
                 return
    -        # Does the class contain more than 5 public methods ?
    -        if nb_public_methods < self.config.min_public_methods:
    -            self.add_message('R0903', node=node,
    -                             args=(nb_public_methods,
    +
    +        # Does the class contain more than n public methods ?
    +        # This checks all the methods defined by ancestors and
    +        # by the current class.
    +        if all_methods < self.config.min_public_methods:
    +            self.add_message('too-few-public-methods', node=node,
    +                             args=(all_methods,
                                        self.config.min_public_methods))
     
         @check_messages('too-many-return-statements', 'too-many-branches',
    -                    'too-many-arguments', 'too-many-locals', 'too-many-statements')
    +                    'too-many-arguments', 'too-many-locals',
    +                    'too-many-statements')
         def visit_function(self, node):
             """check function name, docstring, arguments, redefinition,
             variable names, max locals
             """
    -        self._inc_branch()
             # init branch and returns counters
             self._returns.append(0)
    -        self._branches.append(0)
             # check number of arguments
             args = node.args.args
             if args is not None:
    @@ -291,7 +256,9 @@ def visit_function(self, node):
             # init statements counter
             self._stmts = 1
     
    -    @check_messages('too-many-return-statements', 'too-many-branches', 'too-many-arguments', 'too-many-locals', 'too-many-statements')
    +    @check_messages('too-many-return-statements', 'too-many-branches',
    +                    'too-many-arguments', 'too-many-locals',
    +                    'too-many-statements')
         def leave_function(self, node):
             """most of the work is done here on close:
             checks for max returns, branch, return in __init__
    @@ -300,7 +267,7 @@ def leave_function(self, node):
             if returns > self.config.max_returns:
                 self.add_message('too-many-return-statements', node=node,
                                  args=(returns, self.config.max_returns))
    -        branches = self._branches.pop()
    +        branches = self._branches[node]
             if branches > self.config.max_branches:
                 self.add_message('too-many-branches', node=node,
                                  args=(branches, self.config.max_branches))
    @@ -327,12 +294,12 @@ def visit_tryexcept(self, node):
             branches = len(node.handlers)
             if node.orelse:
                 branches += 1
    -        self._inc_branch(branches)
    +        self._inc_branch(node, branches)
             self._stmts += branches
     
    -    def visit_tryfinally(self, _):
    +    def visit_tryfinally(self, node):
             """increments the branches counter"""
    -        self._inc_branch(2)
    +        self._inc_branch(node, 2)
             self._stmts += 2
     
         def visit_if(self, node):
    @@ -342,7 +309,7 @@ def visit_if(self, node):
             if node.orelse and (len(node.orelse) > 1 or
                                 not isinstance(node.orelse[0], If)):
                 branches += 1
    -        self._inc_branch(branches)
    +        self._inc_branch(node, branches)
             self._stmts += branches
     
         def visit_while(self, node):
    @@ -350,17 +317,14 @@ def visit_while(self, node):
             branches = 1
             if node.orelse:
                 branches += 1
    -        self._inc_branch(branches)
    +        self._inc_branch(node, branches)
     
         visit_for = visit_while
     
    -    def _inc_branch(self, branchesnum=1):
    +    def _inc_branch(self, node, branchesnum=1):
             """increments the branches counter"""
    -        branches = self._branches
    -        for i in xrange(len(branches)):
    -            branches[i] += branchesnum
    +        self._branches[node.scope()] += branchesnum
     
    -    # FIXME: make a nice report...
     
     def register(linter):
         """required method to auto register this checker """
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py b/pymode/libs/pylint/checkers/exceptions.py
    similarity index 52%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py
    rename to pymode/libs/pylint/checkers/exceptions.py
    index 7e0f3fca..88a8f225 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/exceptions.py
    +++ b/pymode/libs/pylint/checkers/exceptions.py
    @@ -16,43 +16,51 @@
     """
     import sys
     
    -from logilab.common.compat import builtins
    -BUILTINS_NAME = builtins.__name__
     import astroid
    -from astroid import YES, Instance, unpack_infer
    +from astroid import YES, Instance, unpack_infer, List, Tuple
    +from logilab.common.compat import builtins
     
     from pylint.checkers import BaseChecker
    -from pylint.checkers.utils import is_empty, is_raising, check_messages
    -from pylint.interfaces import IAstroidChecker
    +from pylint.checkers.utils import (
    +    is_empty,
    +    is_raising,
    +    check_messages,
    +    inherit_from_std_ex,
    +    EXCEPTIONS_MODULE,
    +    has_known_bases,
    +    safe_infer)
    +from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE
     
    -def infer_bases(klass):
    -    """ Fully infer the bases of the klass node.
     
    -    This doesn't use .ancestors(), because we need
    -    the non-inferable nodes (YES nodes),
    -    which can't be retrieved from .ancestors()
    +def _annotated_unpack_infer(stmt, context=None):
    +    """
    +    Recursively generate nodes inferred by the given statement.
    +    If the inferred value is a list or a tuple, recurse on the elements.
    +    Returns an iterator which yields tuples in the format
    +    ('original node', 'infered node').
         """
    -    for base in klass.bases:
    -        try:
    -            inferit = base.infer().next()
    -        except astroid.InferenceError:
    +    if isinstance(stmt, (List, Tuple)):
    +        for elt in stmt.elts:
    +            inferred = safe_infer(elt)
    +            if inferred and inferred is not YES:
    +                yield elt, inferred
    +        return
    +    for infered in stmt.infer(context):
    +        if infered is YES:
                 continue
    -        if inferit is YES:
    -            yield inferit
    -        else:
    -            for base in infer_bases(inferit):
    -                yield base
    +        yield stmt, infered
    +
     
     PY3K = sys.version_info >= (3, 0)
     OVERGENERAL_EXCEPTIONS = ('Exception',)
    -
    +BUILTINS_NAME = builtins.__name__
     MSGS = {
         'E0701': ('Bad except clauses order (%s)',
                   'bad-except-order',
                   'Used when except clauses are not in the correct order (from the '
                   'more specific to the more generic). If you don\'t fix the order, '
                   'some exceptions may not be catched by the most specific handler.'),
    -    'E0702': ('Raising %s while only classes, instances or string are allowed',
    +    'E0702': ('Raising %s while only classes or instances are allowed',
                   'raising-bad-type',
                   'Used when something which is neither a class, an instance or a \
                   string is raised (i.e. a `TypeError` will be raised).'),
    @@ -75,10 +83,6 @@ def infer_bases(klass):
                   'catching-non-exception',
                   'Used when a class which doesn\'t inherit from \
                    BaseException is used as an exception in an except clause.'),
    -
    -    'W0701': ('Raising a string exception',
    -              'raising-string',
    -              'Used when a string exception is raised.'),
         'W0702': ('No exception type(s) specified',
                   'bare-except',
                   'Used when an except clause doesn\'t specify exceptions type to \
    @@ -101,25 +105,9 @@ def infer_bases(klass):
                   'Used when the exception to catch is of the form \
                   "except A or B:".  If intending to catch multiple, \
                   rewrite as "except (A, B):"'),
    -    'W0712': ('Implicit unpacking of exceptions is not supported in Python 3',
    -              'unpacking-in-except',
    -              'Python3 will not allow implicit unpacking of exceptions in except '
    -              'clauses. '
    -              'See http://www.python.org/dev/peps/pep-3110/',
    -              {'maxversion': (3, 0)}),
    -    'W0713': ('Indexing exceptions will not work on Python 3',
    -              'indexing-exception',
    -              'Indexing exceptions will not work on Python 3. Use '
    -              '`exception.args[index]` instead.',
    -              {'maxversion': (3, 0)}),
         }
     
     
    -if sys.version_info < (3, 0):
    -    EXCEPTIONS_MODULE = "exceptions"
    -else:
    -    EXCEPTIONS_MODULE = "builtins"
    -
     class ExceptionsChecker(BaseChecker):
         """checks for
         * excepts without exception filter
    @@ -137,98 +125,148 @@ class ExceptionsChecker(BaseChecker):
                      'help' : 'Exceptions that will emit a warning '
                               'when being caught. Defaults to "%s"' % (
                                   ', '.join(OVERGENERAL_EXCEPTIONS),)}
    -                ),
    -               )
    +               ),
    +              )
     
    -    @check_messages('raising-string', 'nonstandard-exception', 'raising-bad-type',
    -                    'raising-non-exception', 'notimplemented-raised', 'bad-exception-context')
    +    @check_messages('nonstandard-exception',
    +                    'raising-bad-type', 'raising-non-exception',
    +                    'notimplemented-raised', 'bad-exception-context')
         def visit_raise(self, node):
             """visit raise possibly inferring value"""
             # ignore empty raise
             if node.exc is None:
                 return
             if PY3K and node.cause:
    -            try:
    -                cause = node.cause.infer().next()
    -            except astroid.InferenceError:
    -                pass
    -            else:
    -                if cause is YES:
    -                    return
    -                if isinstance(cause, astroid.Const):
    -                    if cause.value is not None:
    -                        self.add_message('bad-exception-context',
    -                                         node=node)
    -                elif (not isinstance(cause, astroid.Class) and
    -                      not inherit_from_std_ex(cause)):
    -                    self.add_message('bad-exception-context',
    -                                      node=node)
    +            self._check_bad_exception_context(node)
    +
             expr = node.exc
             if self._check_raise_value(node, expr):
                 return
             else:
                 try:
    -                value = unpack_infer(expr).next()
    +                value = next(unpack_infer(expr))
                 except astroid.InferenceError:
                     return
                 self._check_raise_value(node, value)
     
    +    def _check_bad_exception_context(self, node):
    +        """Verify that the exception context is properly set.
    +
    +        An exception context can be only `None` or an exception.
    +        """
    +        cause = safe_infer(node.cause)
    +        if cause in (YES, None):
    +            return
    +        if isinstance(cause, astroid.Const):
    +            if cause.value is not None:
    +                self.add_message('bad-exception-context',
    +                                 node=node)
    +        elif (not isinstance(cause, astroid.Class) and
    +              not inherit_from_std_ex(cause)):
    +            self.add_message('bad-exception-context',
    +                             node=node)
    +
         def _check_raise_value(self, node, expr):
             """check for bad values, string exception and class inheritance
             """
             value_found = True
             if isinstance(expr, astroid.Const):
                 value = expr.value
    -            if isinstance(value, str):
    -                self.add_message('raising-string', node=node)
    -            else:
    +            if not isinstance(value, str):
    +                # raising-string will be emitted from python3 porting checker.
                     self.add_message('raising-bad-type', node=node,
                                      args=value.__class__.__name__)
    -        elif (isinstance(expr, astroid.Name) and \
    -                 expr.name in ('None', 'True', 'False')) or \
    -                 isinstance(expr, (astroid.List, astroid.Dict, astroid.Tuple,
    -                                   astroid.Module, astroid.Function)):
    -            self.add_message('raising-bad-type', node=node, args=expr.name)
    +        elif ((isinstance(expr, astroid.Name) and
    +               expr.name in ('None', 'True', 'False')) or
    +              isinstance(expr, (astroid.List, astroid.Dict, astroid.Tuple,
    +                                astroid.Module, astroid.Function))):
    +            emit = True
    +            if not PY3K and isinstance(expr, astroid.Tuple):
    +                # On Python 2, using the following is not an error:
    +                #    raise (ZeroDivisionError, None)
    +                #    raise (ZeroDivisionError, )
    +                # What's left to do is to check that the first
    +                # argument is indeed an exception.
    +                # Verifying the other arguments is not
    +                # the scope of this check.
    +                first = expr.elts[0]
    +                inferred = safe_infer(first)
    +                if isinstance(inferred, Instance):
    +                    # pylint: disable=protected-access
    +                    inferred = inferred._proxied
    +                if (inferred is YES or
    +                        isinstance(inferred, astroid.Class)
    +                        and inherit_from_std_ex(inferred)):
    +                    emit = False
    +            if emit:
    +                self.add_message('raising-bad-type',
    +                                 node=node,
    +                                 args=expr.name)
             elif ((isinstance(expr, astroid.Name) and expr.name == 'NotImplemented')
                   or (isinstance(expr, astroid.CallFunc) and
                       isinstance(expr.func, astroid.Name) and
                       expr.func.name == 'NotImplemented')):
                 self.add_message('notimplemented-raised', node=node)
    -        elif isinstance(expr, astroid.BinOp) and expr.op == '%':
    -            self.add_message('raising-string', node=node)
             elif isinstance(expr, (Instance, astroid.Class)):
                 if isinstance(expr, Instance):
    +                # pylint: disable=protected-access
                     expr = expr._proxied
                 if (isinstance(expr, astroid.Class) and
    -                    not inherit_from_std_ex(expr) and
    -                    expr.root().name != BUILTINS_NAME):
    +                    not inherit_from_std_ex(expr)):
                     if expr.newstyle:
                         self.add_message('raising-non-exception', node=node)
                     else:
    -                    self.add_message('nonstandard-exception', node=node)
    +                    if has_known_bases(expr):
    +                        confidence = INFERENCE
    +                    else:
    +                        confidence = INFERENCE_FAILURE
    +                    self.add_message(
    +                        'nonstandard-exception', node=node,
    +                        confidence=confidence)
                 else:
                     value_found = False
             else:
                 value_found = False
             return value_found
     
    -    @check_messages('unpacking-in-except')
    -    def visit_excepthandler(self, node):
    -        """Visit an except handler block and check for exception unpacking."""
    -        if isinstance(node.name, (astroid.Tuple, astroid.List)):
    -            self.add_message('unpacking-in-except', node=node)
    +    def _check_catching_non_exception(self, handler, exc, part):
    +        if isinstance(exc, astroid.Tuple):
    +            # Check if it is a tuple of exceptions.
    +            inferred = [safe_infer(elt) for elt in exc.elts]
    +            if any(node is astroid.YES for node in inferred):
    +                # Don't emit if we don't know every component.
    +                return
    +            if all(node and inherit_from_std_ex(node)
    +                   for node in inferred):
    +                return
     
    -    @check_messages('indexing-exception')
    -    def visit_subscript(self, node):
    -        """ Look for indexing exceptions. """
    -        try:
    -            for infered in node.value.infer():
    -                if not isinstance(infered, astroid.Instance):
    -                    continue
    -                if inherit_from_std_ex(infered):
    -                    self.add_message('indexing-exception', node=node)
    -        except astroid.InferenceError:
    +        if not isinstance(exc, astroid.Class):
    +            # Don't emit the warning if the infered stmt
    +            # is None, but the exception handler is something else,
    +            # maybe it was redefined.
    +            if (isinstance(exc, astroid.Const) and
    +                    exc.value is None):
    +                if ((isinstance(handler.type, astroid.Const) and
    +                     handler.type.value is None) or
    +                        handler.type.parent_of(exc)):
    +                    # If the exception handler catches None or
    +                    # the exception component, which is None, is
    +                    # defined by the entire exception handler, then
    +                    # emit a warning.
    +                    self.add_message('catching-non-exception',
    +                                     node=handler.type,
    +                                     args=(part.as_string(), ))
    +            else:
    +                self.add_message('catching-non-exception',
    +                                 node=handler.type,
    +                                 args=(part.as_string(), ))
                 return
    +        if (not inherit_from_std_ex(exc) and
    +                exc.root().name != BUILTINS_NAME):
    +            if has_known_bases(exc):
    +                self.add_message('catching-non-exception',
    +                                 node=handler.type,
    +                                 args=(exc.name, ))
     
         @check_messages('bare-except', 'broad-except', 'pointless-except',
                         'binary-op-exception', 'bad-except-order',
    @@ -237,70 +275,58 @@ def visit_tryexcept(self, node):
             """check for empty except"""
             exceptions_classes = []
             nb_handlers = len(node.handlers)
    -        for index, handler  in enumerate(node.handlers):
    +        for index, handler in enumerate(node.handlers):
                 # single except doing nothing but "pass" without else clause
    -            if nb_handlers == 1 and is_empty(handler.body) and not node.orelse:
    -                self.add_message('pointless-except', node=handler.type or handler.body[0])
    +            if is_empty(handler.body) and not node.orelse:
    +                self.add_message('pointless-except',
    +                                 node=handler.type or handler.body[0])
                 if handler.type is None:
    -                if nb_handlers == 1 and not is_raising(handler.body):
    +                if not is_raising(handler.body):
                         self.add_message('bare-except', node=handler)
                     # check if a "except:" is followed by some other
                     # except
    -                elif index < (nb_handlers - 1):
    +                if index < (nb_handlers - 1):
                         msg = 'empty except clause should always appear last'
                         self.add_message('bad-except-order', node=node, args=msg)
     
                 elif isinstance(handler.type, astroid.BoolOp):
    -                self.add_message('binary-op-exception', node=handler, args=handler.type.op)
    +                self.add_message('binary-op-exception',
    +                                 node=handler, args=handler.type.op)
                 else:
                     try:
    -                    excs = list(unpack_infer(handler.type))
    +                    excs = list(_annotated_unpack_infer(handler.type))
                     except astroid.InferenceError:
                         continue
    -                for exc in excs:
    -                    # XXX skip other non class nodes
    -                    if exc is YES or not isinstance(exc, astroid.Class):
    +                for part, exc in excs:
    +                    if exc is YES:
    +                        continue
    +                    if (isinstance(exc, astroid.Instance)
    +                            and inherit_from_std_ex(exc)):
    +                        # pylint: disable=protected-access
    +                        exc = exc._proxied
    +
    +                    self._check_catching_non_exception(handler, exc, part)
    +
    +                    if not isinstance(exc, astroid.Class):
                             continue
    +
                         exc_ancestors = [anc for anc in exc.ancestors()
                                          if isinstance(anc, astroid.Class)]
                         for previous_exc in exceptions_classes:
                             if previous_exc in exc_ancestors:
                                 msg = '%s is an ancestor class of %s' % (
                                     previous_exc.name, exc.name)
    -                            self.add_message('bad-except-order', node=handler.type, args=msg)
    +                            self.add_message('bad-except-order',
    +                                             node=handler.type, args=msg)
                         if (exc.name in self.config.overgeneral_exceptions
    -                        and exc.root().name == EXCEPTIONS_MODULE
    -                        and nb_handlers == 1 and not is_raising(handler.body)):
    -                        self.add_message('broad-except', args=exc.name, node=handler.type)
    -
    -                    if (not inherit_from_std_ex(exc) and
    -                        exc.root().name != BUILTINS_NAME):
    -                        # try to see if the exception is based on a C based
    -                        # exception, by infering all the base classes and
    -                        # looking for inference errors
    -                        bases = infer_bases(exc)
    -                        fully_infered = all(inferit is not YES
    -                                            for inferit in bases)
    -                        if fully_infered:
    -                            self.add_message('catching-non-exception',
    -                                             node=handler.type,
    -                                             args=(exc.name, ))
    +                            and exc.root().name == EXCEPTIONS_MODULE
    +                            and not is_raising(handler.body)):
    +                        self.add_message('broad-except',
    +                                         args=exc.name, node=handler.type)
     
    -                exceptions_classes += excs
    +                exceptions_classes += [exc for _, exc in excs]
     
     
    -def inherit_from_std_ex(node):
    -    """return true if the given class node is subclass of
    -    exceptions.Exception
    -    """
    -    if node.name in ('Exception', 'BaseException') \
    -            and node.root().name == EXCEPTIONS_MODULE:
    -        return True
    -    for parent in node.ancestors(recurs=False):
    -        if inherit_from_std_ex(parent):
    -            return True
    -    return False
    -
     def register(linter):
         """required method to auto register this checker"""
         linter.register_checker(ExceptionsChecker(linter))
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py b/pymode/libs/pylint/checkers/format.py
    similarity index 84%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py
    rename to pymode/libs/pylint/checkers/format.py
    index 8b73049c..8c496ac1 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py
    +++ b/pymode/libs/pylint/checkers/format.py
    @@ -24,9 +24,10 @@
     import keyword
     import sys
     import tokenize
    +from functools import reduce # pylint: disable=redefined-builtin
     
    -if not hasattr(tokenize, 'NL'):
    -    raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
    +import six
    +from six.moves import zip, map, filter # pylint: disable=redefined-builtin
     
     from astroid import nodes
     
    @@ -65,10 +66,10 @@
         'C0301': ('Line too long (%s/%s)',
                   'line-too-long',
                   'Used when a line is longer than a given number of characters.'),
    -    'C0302': ('Too many lines in module (%s)', # was W0302
    +    'C0302': ('Too many lines in module (%s/%s)', # was W0302
                   'too-many-lines',
                   'Used when a module has too much lines, reducing its readability.'
    -              ),
    +             ),
         'C0303': ('Trailing whitespace',
                   'trailing-whitespace',
                   'Used when there is whitespace between the end of a line and the '
    @@ -104,28 +105,20 @@
                    'bracket or block opener.'),
                   {'old_names': [('C0323', 'no-space-after-operator'),
                                  ('C0324', 'no-space-after-comma'),
    -                             ('C0322', 'no-space-before-operator')]})
    -    }
    -
    -
    -if sys.version_info < (3, 0):
    -
    -    MSGS.update({
    -    'W0331': ('Use of the <> operator',
    -              'old-ne-operator',
    -              'Used when the deprecated "<>" operator is used instead \
    -              of "!=".'),
    +                             ('C0322', 'no-space-before-operator')]}),
         'W0332': ('Use of "l" as long integer identifier',
                   'lowercase-l-suffix',
                   'Used when a lower case "l" is used to mark a long integer. You '
                   'should use a upper case "L" since the letter "l" looks too much '
    -              'like the digit "1"'),
    -    'W0333': ('Use of the `` operator',
    -              'backtick',
    -              'Used when the deprecated "``" (backtick) operator is used '
    -              'instead  of the str() function.',
    -              {'scope': WarningScope.NODE}),
    -    })
    +              'like the digit "1"',
    +              {'maxversion': (3, 0)}),
    +    'C0327': ('Mixed line endings LF and CRLF',
    +              'mixed-line-endings',
    +              'Used when there are mixed (LF and CRLF) newline signs in a file.'),
    +    'C0328': ('Unexpected line ending format. There is \'%s\' while it should be \'%s\'.',
    +              'unexpected-line-ending-format',
    +              'Used when there is different newline than expected.'),
    +    }
     
     
     def _underline_token(token):
    @@ -145,29 +138,28 @@ def _column_distance(token1, token2):
     
     
     def _last_token_on_line_is(tokens, line_end, token):
    -    return (
    -        line_end > 0 and tokens.token(line_end-1) == token or
    -        line_end > 1 and tokens.token(line_end-2) == token 
    -        and tokens.type(line_end-1) == tokenize.COMMENT)
    +    return (line_end > 0 and tokens.token(line_end-1) == token or
    +            line_end > 1 and tokens.token(line_end-2) == token
    +            and tokens.type(line_end-1) == tokenize.COMMENT)
     
     
     def _token_followed_by_eol(tokens, position):
    -  return (tokens.type(position+1) == tokenize.NL or
    -          tokens.type(position+1) == tokenize.COMMENT and
    -          tokens.type(position+2) == tokenize.NL)
    +    return (tokens.type(position+1) == tokenize.NL or
    +            tokens.type(position+1) == tokenize.COMMENT and
    +            tokens.type(position+2) == tokenize.NL)
     
     
     def _get_indent_length(line):
    -  """Return the length of the indentation on the given token's line."""
    -  result = 0
    -  for char in line:
    -    if char == ' ':
    -      result += 1
    -    elif char == '\t':
    -      result += _TAB_LENGTH
    -    else:
    -      break
    -  return result
    +    """Return the length of the indentation on the given token's line."""
    +    result = 0
    +    for char in line:
    +        if char == ' ':
    +            result += 1
    +        elif char == '\t':
    +            result += _TAB_LENGTH
    +        else:
    +            break
    +    return result
     
     
     def _get_indent_hint_line(bar_positions, bad_position):
    @@ -311,7 +303,7 @@ def add_block_warning(self, token_position, state, valid_offsets):
             self.retained_warnings.append((token_position, state, valid_offsets))
     
         def get_valid_offsets(self, idx):
    -        """"Returns the valid offsets for the token at the given position."""
    +        """Returns the valid offsets for the token at the given position."""
             # The closing brace on a dict or the 'for' in a dict comprehension may
             # reset two indent levels because the dict value is ended implicitly
             stack_top = -1
    @@ -336,16 +328,19 @@ def _hanging_indent_after_bracket(self, bracket, position):
                     _BeforeBlockOffsets(indentation + self._continuation_size,
                                         indentation + self._continuation_size * 2))
             elif bracket == ':':
    -            if self._cont_stack[-1].context_type == CONTINUED:
    -                # If the dict key was on the same line as the open brace, the new
    -                # correct indent should be relative to the key instead of the
    -                # current indent level
    -                paren_align = self._cont_stack[-1].valid_outdent_offsets
    -                next_align = self._cont_stack[-1].valid_continuation_offsets.copy()
    -                next_align[next_align.keys()[0] + self._continuation_size] = True
    -            else:
    -                next_align = _Offsets(indentation + self._continuation_size, indentation)
    -                paren_align = _Offsets(indentation + self._continuation_size, indentation)
    +            # If the dict key was on the same line as the open brace, the new
    +            # correct indent should be relative to the key instead of the
    +            # current indent level
    +            paren_align = self._cont_stack[-1].valid_outdent_offsets
    +            next_align = self._cont_stack[-1].valid_continuation_offsets.copy()
    +            next_align_keys = list(next_align.keys())
    +            next_align[next_align_keys[0] + self._continuation_size] = True
    +            # Note that the continuation of
    +            # d = {
    +            #       'a': 'b'
    +            #            'c'
    +            # }
    +            # is handled by the special-casing for hanging continued string indents.
                 return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align)
             else:
                 return _ContinuedIndent(
    @@ -358,21 +353,22 @@ def _hanging_indent_after_bracket(self, bracket, position):
         def _continuation_inside_bracket(self, bracket, pos):
             """Extracts indentation information for a continued indent."""
             indentation = _get_indent_length(self._tokens.line(pos))
    -        if self._is_block_opener and self._tokens.start_col(pos+1) - indentation == self._block_indent_size:
    +        token_start = self._tokens.start_col(pos)
    +        next_token_start = self._tokens.start_col(pos + 1)
    +        if self._is_block_opener and next_token_start - indentation == self._block_indent_size:
                 return _ContinuedIndent(
                     CONTINUED_BLOCK,
                     bracket,
                     pos,
    -                _Offsets(self._tokens.start_col(pos)),
    -                _BeforeBlockOffsets(self._tokens.start_col(pos+1),
    -                                    self._tokens.start_col(pos+1) + self._continuation_size))
    +                _Offsets(token_start),
    +                _BeforeBlockOffsets(next_token_start, next_token_start + self._continuation_size))
             else:
                 return _ContinuedIndent(
                     CONTINUED,
                     bracket,
                     pos,
    -                _Offsets(self._tokens.start_col(pos)),
    -                _Offsets(self._tokens.start_col(pos+1)))
    +                _Offsets(token_start),
    +                _Offsets(next_token_start))
     
         def pop_token(self):
             self._cont_stack.pop()
    @@ -404,7 +400,6 @@ class FormatChecker(BaseTokenChecker):
         * unauthorized constructions
         * strict indentation
         * line length
    -    * use of <> instead of !=
         """
     
         __implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)
    @@ -416,7 +411,7 @@ class FormatChecker(BaseTokenChecker):
         # configuration options
         # for available dict keys/values see the optik parser 'add_option' method
         options = (('max-line-length',
    -                {'default' : 80, 'type' : "int", 'metavar' : '',
    +                {'default' : 100, 'type' : "int", 'metavar' : '',
                      'help' : 'Maximum number of characters on a single line.'}),
                    ('ignore-long-lines',
                     {'type': 'regexp', 'metavar': '',
    @@ -424,9 +419,9 @@ class FormatChecker(BaseTokenChecker):
                      'help': ('Regexp for a line that is allowed to be longer than '
                               'the limit.')}),
                    ('single-line-if-stmt',
    -                 {'default': False, 'type' : 'yn', 'metavar' : '',
    -                  'help' : ('Allow the body of an if to be on the same '
    -                            'line as the test if there is no else.')}),
    +                {'default': False, 'type' : 'yn', 'metavar' : '',
    +                 'help' : ('Allow the body of an if to be on the same '
    +                           'line as the test if there is no else.')}),
                    ('no-space-check',
                     {'default': ','.join(_NO_SPACE_CHECK_CHOICES),
                      'type': 'multiple_choice',
    @@ -436,16 +431,21 @@ class FormatChecker(BaseTokenChecker):
                    ('max-module-lines',
                     {'default' : 1000, 'type' : 'int', 'metavar' : '',
                      'help': 'Maximum number of lines in a module'}
    -                ),
    +               ),
                    ('indent-string',
                     {'default' : '    ', 'type' : "string", 'metavar' : '',
    -                 'help' : 'String used as indentation unit. This is usually \
    -"    " (4 spaces) or "\\t" (1 tab).'}),
    +                 'help' : 'String used as indentation unit. This is usually '
    +                          '"    " (4 spaces) or "\\t" (1 tab).'}),
                    ('indent-after-paren',
                     {'type': 'int', 'metavar': '', 'default': 4,
                      'help': 'Number of spaces of indent required inside a hanging '
                              ' or continued line.'}),
    -               )
    +               ('expected-line-ending-format',
    +                {'type': 'choice', 'metavar': '', 'default': '',
    +                 'choices': ['', 'LF', 'CRLF'],
    +                 'help': ('Expected format of line ending, '
    +                          'e.g. empty (any line ending), LF or CRLF.')}),
    +              )
     
         def __init__(self, linter=None):
             BaseTokenChecker.__init__(self, linter)
    @@ -499,7 +499,7 @@ def _check_keyword_parentheses(self, tokens, start):
             keyword_token = tokens[start][1]
             line_num = tokens[start][2][0]
     
    -        for i in xrange(start, len(tokens) - 1):
    +        for i in range(start, len(tokens) - 1):
                 token = tokens[i]
     
                 # If we hit a newline, then assume any parens were for continuation.
    @@ -513,8 +513,9 @@ def _check_keyword_parentheses(self, tokens, start):
                     if not depth:
                         # ')' can't happen after if (foo), since it would be a syntax error.
                         if (tokens[i+1][1] in (':', ')', ']', '}', 'in') or
    -                        tokens[i+1][0] in (tokenize.NEWLINE, tokenize.ENDMARKER,
    -                                             tokenize.COMMENT)):
    +                            tokens[i+1][0] in (tokenize.NEWLINE,
    +                                               tokenize.ENDMARKER,
    +                                               tokenize.COMMENT)):
                             # The empty tuple () is always accepted.
                             if i == start + 2:
                                 return
    @@ -591,7 +592,7 @@ def _handle_colon(self, tokens, i):
             if self._inside_brackets('['):
                 return
             if (self._inside_brackets('{') and
    -            _DICT_SEPARATOR in self.config.no_space_check):
    +                _DICT_SEPARATOR in self.config.no_space_check):
                 policy = (_IGNORE, _IGNORE)
             else:
                 policy = (_MUST_NOT, _MUST)
    @@ -624,13 +625,13 @@ def _policy_string(policy):
                     return 'No', 'allowed'
     
             def _name_construct(token):
    -            if tokens[i][1] == ',':
    +            if token[1] == ',':
                     return 'comma'
    -            elif tokens[i][1] == ':':
    +            elif token[1] == ':':
                     return ':'
    -            elif tokens[i][1] in '()[]{}':
    +            elif token[1] in '()[]{}':
                     return 'bracket'
    -            elif tokens[i][1] in ('<', '>', '<=', '>=', '!=', '=='):
    +            elif token[1] in ('<', '>', '<=', '>=', '!=', '=='):
                     return 'comparison'
                 else:
                     if self._inside_brackets('('):
    @@ -639,7 +640,8 @@ def _name_construct(token):
                         return 'assignment'
     
             good_space = [True, True]
    -        pairs = [(tokens[i-1], tokens[i]), (tokens[i], tokens[i+1])]
    +        token = tokens[i]
    +        pairs = [(tokens[i-1], token), (token, tokens[i+1])]
     
             for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
                 if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
    @@ -660,19 +662,15 @@ def _name_construct(token):
                     if not ok:
                         warnings.append((policy, position))
             for policy, position in warnings:
    -            construct = _name_construct(tokens[i])
    +            construct = _name_construct(token)
                 count, state = _policy_string(policy)
    -            self.add_message('bad-whitespace', line=tokens[i][2][0],
    +            self.add_message('bad-whitespace', line=token[2][0],
                                  args=(count, state, position, construct,
    -                                   _underline_token(tokens[i])))
    +                                   _underline_token(token)))
     
         def _inside_brackets(self, left):
             return self._bracket_stack[-1] == left
     
    -    def _handle_old_ne_operator(self, tokens, i):
    -        if tokens[i][1] == '<>':
    -            self.add_message('old-ne-operator', line=tokens[i][2][0])
    -
         def _prepare_token_dispatcher(self):
             raw = [
                 (_KEYWORD_TOKENS,
    @@ -692,7 +690,6 @@ def _prepare_token_dispatcher(self):
     
                 (['lambda'], self._open_lambda),
     
    -            (['<>'], self._handle_old_ne_operator),
                 ]
     
             dispatch = {}
    @@ -717,6 +714,7 @@ def process_tokens(self, tokens):
             self._lines = {}
             self._visited_lines = {}
             token_handlers = self._prepare_token_dispatcher()
    +        self._last_line_ending = None
     
             self._current_line = ContinuedLineState(tokens, self.config)
             for idx, (tok_type, token, start, _, line) in enumerate(tokens):
    @@ -729,7 +727,7 @@ def process_tokens(self, tokens):
                         self.new_line(TokenWrapper(tokens), idx-1, idx+1)
                     else:
                         self.new_line(TokenWrapper(tokens), idx-1, idx)
    -            
    +
                 if tok_type == tokenize.NEWLINE:
                     # a program statement, or ENDMARKER, will eventually follow,
                     # after some (possibly empty) run of tokens of the form
    @@ -739,6 +737,7 @@ def process_tokens(self, tokens):
                     check_equal = True
                     self._process_retained_warnings(TokenWrapper(tokens), idx)
                     self._current_line.next_logical_line()
    +                self._check_line_ending(token, line_num)
                 elif tok_type == tokenize.INDENT:
                     check_equal = False
                     self.check_indent_level(token, indents[-1]+1, line_num)
    @@ -778,14 +777,41 @@ def process_tokens(self, tokens):
     
             line_num -= 1 # to be ok with "wc -l"
             if line_num > self.config.max_module_lines:
    -            self.add_message('too-many-lines', args=line_num, line=1)
    +            # Get the line where the too-many-lines (or its message id)
    +            # was disabled or default to 1.
    +            symbol = self.linter.msgs_store.check_message_id('too-many-lines')
    +            names = (symbol.msgid, 'too-many-lines')
    +            line = next(filter(None,
    +                               map(self.linter._pragma_lineno.get, names)), 1)
    +            self.add_message('too-many-lines',
    +                             args=(line_num, self.config.max_module_lines),
    +                             line=line)
    +
    +    def _check_line_ending(self, line_ending, line_num):
    +        # check if line endings are mixed
    +        if self._last_line_ending is not None:
    +            if line_ending != self._last_line_ending:
    +                self.add_message('mixed-line-endings', line=line_num)
    +
    +        self._last_line_ending = line_ending
    +
    +        # check if line ending is as expected
    +        expected = self.config.expected_line_ending_format
    +        if expected:
    +            # reduce multiple \n\n\n\n to one \n
    +            line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "")
    +            line_ending = 'LF' if line_ending == '\n' else 'CRLF'
    +            if line_ending != expected:
    +                self.add_message('unexpected-line-ending-format', args=(line_ending, expected),
    +                                 line=line_num)
    +
     
         def _process_retained_warnings(self, tokens, current_pos):
             single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ':')
     
             for indent_pos, state, offsets in self._current_line.retained_warnings:
                 block_type = offsets[tokens.start_col(indent_pos)]
    -            hints = dict((k, v) for k, v in offsets.iteritems()
    +            hints = dict((k, v) for k, v in six.iteritems(offsets)
                              if v != block_type)
                 if single_line_block_stmt and block_type == WITH_BODY:
                     self._add_continuation_message(state, hints, tokens, indent_pos)
    @@ -793,16 +819,19 @@ def _process_retained_warnings(self, tokens, current_pos):
                     self._add_continuation_message(state, hints, tokens, indent_pos)
     
         def _check_continued_indentation(self, tokens, next_idx):
    +        def same_token_around_nl(token_type):
    +            return (tokens.type(next_idx) == token_type and
    +                    tokens.type(next_idx-2) == token_type)
    +
             # Do not issue any warnings if the next line is empty.
             if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL:
                 return
     
             state, valid_offsets = self._current_line.get_valid_offsets(next_idx)
    -        # Special handling for hanging comments. If the last line ended with a
    -        # comment and the new line contains only a comment, the line may also be
    -        # indented to the start of the previous comment.
    -        if (tokens.type(next_idx) == tokenize.COMMENT and
    -                tokens.type(next_idx-2) == tokenize.COMMENT):
    +        # Special handling for hanging comments and strings. If the last line ended
    +        # with a comment (string) and the new line contains only a comment, the line
    +        # may also be indented to the start of the previous token.
    +        if same_token_around_nl(tokenize.COMMENT) or same_token_around_nl(tokenize.STRING):
                 valid_offsets[tokens.start_col(next_idx-2)] = True
     
             # We can only decide if the indentation of a continued line before opening
    @@ -839,7 +868,7 @@ def visit_default(self, node):
                 # by taking the last line of the body and adding 1, which
                 # should be the line of finally:
                 if (isinstance(node.parent, nodes.TryFinally)
    -                and node in node.parent.finalbody):
    +                    and node in node.parent.finalbody):
                     prev_line = node.parent.body[0].tolineno + 1
                 else:
                     prev_line = node.parent.statement().fromlineno
    @@ -856,7 +885,7 @@ def visit_default(self, node):
                 tolineno = node.tolineno
             assert tolineno, node
             lines = []
    -        for line in xrange(line, tolineno + 1):
    +        for line in range(line, tolineno + 1):
                 self._visited_lines[line] = 1
                 try:
                     lines.append(self._lines[line].rstrip())
    @@ -872,18 +901,14 @@ def _check_multi_statement_line(self, node, line):
             # For try... except... finally..., the two nodes
             # appear to be on the same line due to how the AST is built.
             if (isinstance(node, nodes.TryExcept) and
    -            isinstance(node.parent, nodes.TryFinally)):
    +                isinstance(node.parent, nodes.TryFinally)):
                 return
             if (isinstance(node.parent, nodes.If) and not node.parent.orelse
    -            and self.config.single_line_if_stmt):
    +                and self.config.single_line_if_stmt):
                 return
             self.add_message('multiple-statements', node=node)
             self._visited_lines[line] = 2
     
    -    @check_messages('backtick')
    -    def visit_backquote(self, node):
    -        self.add_message('backtick', node=node)
    -
         def check_lines(self, lines, i):
             """check lines have less than a maximum number of characters
             """
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py b/pymode/libs/pylint/checkers/imports.py
    similarity index 86%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py
    rename to pymode/libs/pylint/checkers/imports.py
    index 8b73c6f6..1969eeb1 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/imports.py
    +++ b/pymode/libs/pylint/checkers/imports.py
    @@ -16,19 +16,31 @@
     """imports checkers for Python code"""
     
     import sys
    +from collections import defaultdict
    +
    +import six
    +from six.moves import map # pylint: disable=redefined-builtin
     
     from logilab.common.graph import get_cycles, DotBackend
    -from logilab.common.modutils import get_module_part, is_standard_module
     from logilab.common.ureports import VerbatimText, Paragraph
     
     import astroid
     from astroid import are_exclusive
    +from astroid.modutils import get_module_part, is_standard_module
     
     from pylint.interfaces import IAstroidChecker
     from pylint.utils import EmptyReport
     from pylint.checkers import BaseChecker
    -from pylint.checkers.utils import check_messages
    +from pylint.checkers.utils import check_messages, is_import_error
     
    +def _except_import_error(node):
    +    """
    +    Check if the try-except node has an ImportError handler.
    +    Return True if an ImportError handler was inferred, False otherwise.
    +    """
    +    if not isinstance(node, astroid.TryExcept):
    +        return
    +    return any(map(is_import_error, node.handlers))
     
     def get_first_import(node, context, name, base, level):
         """return the node where [base.] is imported or None if not found
    @@ -48,7 +60,8 @@ def get_first_import(node, context, name, base, level):
                     break
             elif isinstance(first, astroid.From):
                 if level == first.level and any(
    -                fullname == '%s.%s' % (first.modname, iname[0]) for iname in first.names):
    +                    fullname == '%s.%s' % (first.modname, iname[0])
    +                    for iname in first.names):
                     found = True
                     break
         if found and not are_exclusive(first, node):
    @@ -97,14 +110,14 @@ def dependencies_graph(filename, dep_info):
         done = {}
         printer = DotBackend(filename[:-4], rankdir='LR')
         printer.emit('URL="." node[shape="box"]')
    -    for modname, dependencies in sorted(dep_info.iteritems()):
    +    for modname, dependencies in sorted(six.iteritems(dep_info)):
             done[modname] = 1
             printer.emit_node(modname)
             for modname in dependencies:
                 if modname not in done:
                     done[modname] = 1
                     printer.emit_node(modname)
    -    for depmodname, dependencies in sorted(dep_info.iteritems()):
    +    for depmodname, dependencies in sorted(six.iteritems(dep_info)):
             for modname in dependencies:
                 printer.emit_edge(modname, depmodname)
         printer.generate(filename)
    @@ -138,8 +151,9 @@ def make_graph(filename, dep_info, sect, gtype):
                   'Used a module marked as deprecated is imported.'),
         'W0403': ('Relative import %r, should be %r',
                   'relative-import',
    -              'Used when an import relative to the package directory is \
    -              detected.'),
    +              'Used when an import relative to the package directory is '
    +              'detected.',
    +              {'maxversion': (3, 0)}),
         'W0404': ('Reimport %r (imported line %s)',
                   'reimported',
                   'Used when a module is reimported multiple times.'),
    @@ -178,30 +192,29 @@ class ImportsChecker(BaseChecker):
                      'metavar' : '',
                      'help' : 'Deprecated modules which should not be used, \
     separated by a comma'}
    -                ),
    +               ),
                    ('import-graph',
                     {'default' : '',
                      'type' : 'string',
                      'metavar' : '',
                      'help' : 'Create a graph of every (i.e. internal and \
     external) dependencies in the given file (report RP0402 must not be disabled)'}
    -                ),
    +               ),
                    ('ext-import-graph',
                     {'default' : '',
                      'type' : 'string',
                      'metavar' : '',
                      'help' : 'Create a graph of external dependencies in the \
     given file (report RP0402 must not be disabled)'}
    -                ),
    +               ),
                    ('int-import-graph',
                     {'default' : '',
                      'type' : 'string',
                      'metavar' : '',
                      'help' : 'Create a graph of internal dependencies in the \
     given file (report RP0402 must not be disabled)'}
    -                ),
    -
    -               )
    +               ),
    +              )
     
         def __init__(self, linter=None):
             BaseChecker.__init__(self, linter)
    @@ -212,27 +225,28 @@ def __init__(self, linter=None):
                              self.report_external_dependencies),
                             ('RP0402', 'Modules dependencies graph',
                              self.report_dependencies_graph),
    -                        )
    +                       )
     
         def open(self):
             """called before visiting project (i.e set of modules)"""
             self.linter.add_stats(dependencies={})
             self.linter.add_stats(cycles=[])
             self.stats = self.linter.stats
    -        self.import_graph = {}
    +        self.import_graph = defaultdict(set)
     
         def close(self):
             """called before visiting project (i.e set of modules)"""
             # don't try to compute cycles if the associated message is disabled
             if self.linter.is_message_enabled('cyclic-import'):
    -            for cycle in get_cycles(self.import_graph):
    +            vertices = list(self.import_graph)
    +            for cycle in get_cycles(self.import_graph, vertices=vertices):
                     self.add_message('cyclic-import', args=' -> '.join(cycle))
     
         def visit_import(self, node):
             """triggered when an import statement is seen"""
             modnode = node.root()
             for name, _ in node.names:
    -            importedmodnode = self.get_imported_module(modnode, node, name)
    +            importedmodnode = self.get_imported_module(node, name)
                 if importedmodnode is None:
                     continue
                 self._check_relative_import(modnode, node, importedmodnode, name)
    @@ -252,14 +266,14 @@ def visit_from(self, node):
                 if prev:
                     # consecutive future statements are possible
                     if not (isinstance(prev, astroid.From)
    -                       and prev.modname == '__future__'):
    +                        and prev.modname == '__future__'):
                         self.add_message('misplaced-future', node=node)
                 return
             for name, _ in node.names:
                 if name == '*':
                     self.add_message('wildcard-import', args=basename, node=node)
             modnode = node.root()
    -        importedmodnode = self.get_imported_module(modnode, node, basename)
    +        importedmodnode = self.get_imported_module(node, basename)
             if importedmodnode is None:
                 return
             self._check_relative_import(modnode, node, importedmodnode, basename)
    @@ -269,15 +283,16 @@ def visit_from(self, node):
                     self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
                     self._check_reimport(node, name, basename, node.level)
     
    -    def get_imported_module(self, modnode, importnode, modname):
    +    def get_imported_module(self, importnode, modname):
             try:
                 return importnode.do_import_module(modname)
    -        except astroid.InferenceError, ex:
    +        except astroid.InferenceError as ex:
                 if str(ex) != modname:
                     args = '%r (%s)' % (modname, ex)
                 else:
                     args = repr(modname)
    -            self.add_message("import-error", args=args, node=importnode)
    +            if not _except_import_error(importnode.parent):
    +                self.add_message("import-error", args=args, node=importnode)
     
         def _check_relative_import(self, modnode, importnode, importedmodnode,
                                    importedasname):
    @@ -294,12 +309,16 @@ def _check_relative_import(self, modnode, importnode, importedmodnode,
                 return False
             if importedmodnode.name != importedasname:
                 # this must be a relative import...
    -            self.add_message('relative-import', args=(importedasname, importedmodnode.name),
    +            self.add_message('relative-import',
    +                             args=(importedasname, importedmodnode.name),
                                  node=importnode)
     
         def _add_imported_module(self, node, importedmodname):
             """notify an imported module, used to analyze dependencies"""
    -        importedmodname = get_module_part(importedmodname)
    +        try:
    +            importedmodname = get_module_part(importedmodname)
    +        except ImportError:
    +            pass
             context_name = node.root().name
             if context_name == importedmodname:
                 # module importing itself !
    @@ -311,8 +330,8 @@ def _add_imported_module(self, node, importedmodname):
                 if not context_name in importedmodnames:
                     importedmodnames.add(context_name)
                 # update import graph
    -            mgraph = self.import_graph.setdefault(context_name, set())
    -            if not importedmodname in mgraph:
    +            mgraph = self.import_graph[context_name]
    +            if importedmodname not in mgraph:
                     mgraph.add(importedmodname)
     
         def _check_deprecated_module(self, node, mod_path):
    @@ -339,7 +358,7 @@ def _check_reimport(self, node, name, basename=None, level=None):
     
         def report_external_dependencies(self, sect, _, dummy):
             """return a verbatim layout for displaying dependencies"""
    -        dep_info = make_tree_defs(self._external_dependencies_info().iteritems())
    +        dep_info = make_tree_defs(six.iteritems(self._external_dependencies_info()))
             if not dep_info:
                 raise EmptyReport()
             tree_str = repr_tree_defs(dep_info)
    @@ -369,9 +388,9 @@ def _external_dependencies_info(self):
             cache them
             """
             if self.__ext_dep_info is None:
    -            package = self.linter.base_name
    +            package = self.linter.current_name
                 self.__ext_dep_info = result = {}
    -            for importee, importers in self.stats['dependencies'].iteritems():
    +            for importee, importers in six.iteritems(self.stats['dependencies']):
                     if not importee.startswith(package):
                         result[importee] = importers
             return self.__ext_dep_info
    @@ -381,9 +400,9 @@ def _internal_dependencies_info(self):
             cache them
             """
             if self.__int_dep_info is None:
    -            package = self.linter.base_name
    +            package = self.linter.current_name
                 self.__int_dep_info = result = {}
    -            for importee, importers in self.stats['dependencies'].iteritems():
    +            for importee, importers in six.iteritems(self.stats['dependencies']):
                     if importee.startswith(package):
                         result[importee] = importers
             return self.__int_dep_info
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py b/pymode/libs/pylint/checkers/logging.py
    similarity index 69%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py
    rename to pymode/libs/pylint/checkers/logging.py
    index cbdf0f2a..897c1c7f 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/logging.py
    +++ b/pymode/libs/pylint/checkers/logging.py
    @@ -20,18 +20,29 @@
     from pylint.checkers import utils
     from pylint.checkers.utils import check_messages
     
    +import six
    +
    +
     MSGS = {
         'W1201': ('Specify string format arguments as logging function parameters',
    -             'logging-not-lazy',
    -             'Used when a logging statement has a call form of '
    -             '"logging.(format_string % (format_args...))". '
    -             'Such calls should leave string interpolation to the logging '
    -             'method itself and be written '
    -             '"logging.(format_string, format_args...)" '
    -             'so that the program may avoid incurring the cost of the '
    -             'interpolation in those cases in which no message will be '
    -             'logged. For more, see '
    -             'http://www.python.org/dev/peps/pep-0282/.'),
    +              'logging-not-lazy',
    +              'Used when a logging statement has a call form of '
    +              '"logging.(format_string % (format_args...))". '
    +              'Such calls should leave string interpolation to the logging '
    +              'method itself and be written '
    +              '"logging.(format_string, format_args...)" '
    +              'so that the program may avoid incurring the cost of the '
    +              'interpolation in those cases in which no message will be '
    +              'logged. For more, see '
    +              'http://www.python.org/dev/peps/pep-0282/.'),
    +    'W1202': ('Use % formatting in logging functions but pass the % '
    +              'parameters as arguments',
    +              'logging-format-interpolation',
    +              'Used when a logging statement has a call form of '
    +              '"logging.(format_string.format(format_args...))"'
    +              '. Such calls should use % formatting instead, but leave '
    +              'interpolation to the logging function by passing the parameters '
    +              'as arguments.'),
         'E1200': ('Unsupported logging format character %r (%#02x) at index %d',
                   'logging-unsupported-format',
                   'Used when an unsupported format character is used in a logging\
    @@ -53,6 +64,27 @@
         'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn',
         'warning'])
     
    +def is_method_call(callfunc_node, types=(), methods=()):
    +    """Determines if a CallFunc node represents a method call.
    +
    +    Args:
    +      callfunc_node: The CallFunc AST node to check.
    +      types: Optional sequence of caller type names to restrict check.
    +      methods: Optional sequence of method names to restrict check.
    +
    +    Returns:
    +      True, if the node represents a method call for the given type and
    +      method names, False otherwise.
    +    """
    +    if not isinstance(callfunc_node, astroid.CallFunc):
    +        return False
    +    func = utils.safe_infer(callfunc_node.func)
    +    return (isinstance(func, astroid.BoundMethod)
    +            and isinstance(func.bound, astroid.Instance)
    +            and (func.bound.name in types if types else True)
    +            and (func.name in methods if methods else True))
    +
    +
     
     class LoggingChecker(checkers.BaseChecker):
         """Checks use of the logging module."""
    @@ -62,15 +94,15 @@ class LoggingChecker(checkers.BaseChecker):
         msgs = MSGS
     
         options = (('logging-modules',
    -                {'default' : ('logging',),
    -                 'type' : 'csv',
    -                 'metavar' : '',
    -                 'help' : ('Logging modules to check that the string format '
    -                           'arguments are in logging function parameter format')}
    -                ),
    -               )
    -
    -    def visit_module(self, unused_node):
    +                {'default': ('logging',),
    +                 'type': 'csv',
    +                 'metavar': '',
    +                 'help': 'Logging modules to check that the string format '
    +                         'arguments are in logging function parameter format'}
    +               ),
    +              )
    +
    +    def visit_module(self, node): # pylint: disable=unused-argument
             """Clears any state left in this checker from last module checked."""
             # The code being checked can just as easily "import logging as foo",
             # so it is necessary to process the imports and store in this field
    @@ -105,19 +137,19 @@ def visit_import(self, node):
         def visit_callfunc(self, node):
             """Checks calls to logging methods."""
             def is_logging_name():
    -           return (isinstance(node.func, astroid.Getattr) and
    -                   isinstance(node.func.expr, astroid.Name) and 
    -                   node.func.expr.name in self._logging_names)
    +            return (isinstance(node.func, astroid.Getattr) and
    +                    isinstance(node.func.expr, astroid.Name) and
    +                    node.func.expr.name in self._logging_names)
     
             def is_logger_class():
                 try:
                     for inferred in node.func.infer():
                         if isinstance(inferred, astroid.BoundMethod):
                             parent = inferred._proxied.parent
    -                        if (isinstance(parent, astroid.Class) and 
    -                            (parent.qname() == 'logging.Logger' or 
    -                             any(ancestor.qname() == 'logging.Logger' 
    -                                 for ancestor in parent.ancestors()))):
    +                        if (isinstance(parent, astroid.Class) and
    +                                (parent.qname() == 'logging.Logger' or
    +                                 any(ancestor.qname() == 'logging.Logger'
    +                                     for ancestor in parent.ancestors()))):
                                 return True, inferred._proxied.name
                 except astroid.exceptions.InferenceError:
                     pass
    @@ -150,9 +182,20 @@ def _check_log_method(self, node, name):
     
             if isinstance(node.args[format_pos], astroid.BinOp) and node.args[format_pos].op == '%':
                 self.add_message('logging-not-lazy', node=node)
    +        elif isinstance(node.args[format_pos], astroid.CallFunc):
    +            self._check_call_func(node.args[format_pos])
             elif isinstance(node.args[format_pos], astroid.Const):
                 self._check_format_string(node, format_pos)
     
    +    def _check_call_func(self, callfunc_node):
    +        """Checks that function call is not format_string.format().
    +
    +        Args:
    +          callfunc_node: CallFunc AST node to be checked.
    +        """
    +        if is_method_call(callfunc_node, ('str', 'unicode'), ('format',)):
    +            self.add_message('logging-format-interpolation', node=callfunc_node)
    +
         def _check_format_string(self, node, format_arg):
             """Checks that format string tokens match the supplied arguments.
     
    @@ -166,7 +209,7 @@ def _check_format_string(self, node, format_arg):
                 # don't check any further.
                 return
             format_string = node.args[format_arg].value
    -        if not isinstance(format_string, basestring):
    +        if not isinstance(format_string, six.string_types):
                 # If the log format is constant non-string (e.g. logging.debug(5)),
                 # ensure there are no arguments.
                 required_num_args = 0
    @@ -178,7 +221,7 @@ def _check_format_string(self, node, format_arg):
                         # Keyword checking on logging strings is complicated by
                         # special keywords - out of scope.
                         return
    -            except utils.UnsupportedFormatCharacter, ex:
    +            except utils.UnsupportedFormatCharacter as ex:
                     char = format_string[ex.index]
                     self.add_message('logging-unsupported-format', node=node,
                                      args=(char, ord(char), ex.index))
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py b/pymode/libs/pylint/checkers/misc.py
    similarity index 64%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py
    rename to pymode/libs/pylint/checkers/misc.py
    index d1b7c216..7fbe70bf 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/misc.py
    +++ b/pymode/libs/pylint/checkers/misc.py
    @@ -21,6 +21,7 @@
     
     from pylint.interfaces import IRawChecker
     from pylint.checkers import BaseChecker
    +import six
     
     
     MSGS = {
    @@ -32,10 +33,11 @@
                   'Used when a source line cannot be decoded using the specified '
                   'source file encoding.',
                   {'maxversion': (3, 0)}),
    -    }
    +}
     
     
     class EncodingChecker(BaseChecker):
    +
         """checks for:
         * warning notes in the code like FIXME, XXX
         * encoding issues.
    @@ -47,22 +49,32 @@ class EncodingChecker(BaseChecker):
         msgs = MSGS
     
         options = (('notes',
    -                {'type' : 'csv', 'metavar' : '',
    -                 'default' : ('FIXME', 'XXX', 'TODO'),
    -                 'help' : 'List of note tags to take in consideration, \
    -separated by a comma.'
    -                 }),
    -               )
    +                {'type': 'csv', 'metavar': '',
    +                 'default': ('FIXME', 'XXX', 'TODO'),
    +                 'help': ('List of note tags to take in consideration, '
    +                          'separated by a comma.')}),)
     
         def _check_note(self, notes, lineno, line):
    +        # First, simply check if the notes are in the line at all. This is an
    +        # optimisation to prevent using the regular expression on every line,
    +        # but rather only on lines which may actually contain one of the notes.
    +        # This prevents a pathological problem with lines that are hundreds
    +        # of thousands of characters long.
    +        for note in self.config.notes:
    +            if note in line:
    +                break
    +        else:
    +            return
    +
             match = notes.search(line)
    -        if match:
    -            self.add_message('fixme', args=line[match.start():-1], line=lineno)
    +        if not match:
    +            return
    +        self.add_message('fixme', args=line[match.start(1):-1], line=lineno)
     
         def _check_encoding(self, lineno, line, file_encoding):
             try:
    -            return unicode(line, file_encoding)
    -        except UnicodeDecodeError, ex:
    +            return six.text_type(line, file_encoding)
    +        except UnicodeDecodeError as ex:
                 self.add_message('invalid-encoded-data', line=lineno,
                                  args=(file_encoding, ex.args[2]))
     
    @@ -70,20 +82,22 @@ def process_module(self, module):
             """inspect the source file to find encoding problem or fixmes like
             notes
             """
    -        stream = module.file_stream
    -        stream.seek(0) # XXX may be removed with astroid > 0.23
             if self.config.notes:
    -            notes = re.compile('|'.join(self.config.notes))
    +            notes = re.compile(
    +                r'.*?#\s*(%s)(:*\s*.+)' % "|".join(self.config.notes))
             else:
                 notes = None
             if module.file_encoding:
                 encoding = module.file_encoding
             else:
                 encoding = 'ascii'
    -        for lineno, line in enumerate(stream):
    -            line = self._check_encoding(lineno+1, line, encoding)
    -            if line is not None and notes:
    -                self._check_note(notes, lineno+1, line)
    +
    +        with module.stream() as stream:
    +            for lineno, line in enumerate(stream):
    +                line = self._check_encoding(lineno + 1, line, encoding)
    +                if line is not None and notes:
    +                    self._check_note(notes, lineno + 1, line)
    +
     
     def register(linter):
         """required method to auto register this checker"""
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py b/pymode/libs/pylint/checkers/newstyle.py
    similarity index 76%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py
    rename to pymode/libs/pylint/checkers/newstyle.py
    index f801c443..f74e7f15 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/newstyle.py
    +++ b/pymode/libs/pylint/checkers/newstyle.py
    @@ -19,9 +19,13 @@
     
     import astroid
     
    -from pylint.interfaces import IAstroidChecker
    +from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
     from pylint.checkers import BaseChecker
    -from pylint.checkers.utils import check_messages
    +from pylint.checkers.utils import (
    +    check_messages,
    +    has_known_bases,
    +    node_frame_class,
    +)
     
     MSGS = {
         'E1001': ('Use of __slots__ on an old style class',
    @@ -43,7 +47,7 @@
                   {'maxversion': (3, 0)}),
         'W1001': ('Use of "property" on an old style class',
                   'property-on-old-class',
    -              'Used when PyLint detect the use of the builtin "property" \
    +              'Used when Pylint detect the use of the builtin "property" \
                   on an old style class while this is relying on new style \
                   classes features.',
                   {'maxversion': (3, 0)}),
    @@ -74,26 +78,35 @@ class NewStyleConflictChecker(BaseChecker):
     
         @check_messages('slots-on-old-class', 'old-style-class')
         def visit_class(self, node):
    -        """check __slots__ usage
    +        """ Check __slots__ in old style classes and old
    +        style class definition.
             """
             if '__slots__' in node and not node.newstyle:
    -            self.add_message('slots-on-old-class', node=node)
    +            confidence = (INFERENCE if has_known_bases(node)
    +                          else INFERENCE_FAILURE)
    +            self.add_message('slots-on-old-class', node=node,
    +                             confidence=confidence)
             # The node type could be class, exception, metaclass, or
             # interface.  Presumably, the non-class-type nodes would always
             # have an explicit base class anyway.
    -        if not node.bases and node.type == 'class':
    -            self.add_message('old-style-class', node=node)
    +        if not node.bases and node.type == 'class' and not node.metaclass():
    +            # We use confidence HIGH here because this message should only ever
    +            # be emitted for classes at the root of the inheritance hierarchyself.
    +            self.add_message('old-style-class', node=node, confidence=HIGH)
     
         @check_messages('property-on-old-class')
         def visit_callfunc(self, node):
             """check property usage"""
             parent = node.parent.frame()
             if (isinstance(parent, astroid.Class) and
    -            not parent.newstyle and
    -            isinstance(node.func, astroid.Name)):
    +                not parent.newstyle and
    +                isinstance(node.func, astroid.Name)):
    +            confidence = (INFERENCE if has_known_bases(parent)
    +                          else INFERENCE_FAILURE)
                 name = node.func.name
                 if name == 'property':
    -                self.add_message('property-on-old-class', node=node)
    +                self.add_message('property-on-old-class', node=node,
    +                                 confidence=confidence)
     
         @check_messages('super-on-old-class', 'bad-super-call', 'missing-super-argument')
         def visit_function(self, node):
    @@ -103,6 +116,9 @@ def visit_function(self, node):
                 return
             klass = node.parent.frame()
             for stmt in node.nodes_of_class(astroid.CallFunc):
    +            if node_frame_class(stmt) != node_frame_class(node):
    +                # Don't look down in other scopes.
    +                continue
                 expr = stmt.func
                 if not isinstance(expr, astroid.Getattr):
                     continue
    @@ -111,9 +127,12 @@ def visit_function(self, node):
                 if isinstance(call, astroid.CallFunc) and \
                    isinstance(call.func, astroid.Name) and \
                    call.func.name == 'super':
    +                confidence = (INFERENCE if has_known_bases(klass)
    +                              else INFERENCE_FAILURE)
                     if not klass.newstyle:
                         # super should not be used on an old style class
    -                    self.add_message('super-on-old-class', node=node)
    +                    self.add_message('super-on-old-class', node=node,
    +                                     confidence=confidence)
                     else:
                         # super first arg should be the class
                         if not call.args and sys.version_info[0] == 3:
    @@ -121,13 +140,14 @@ def visit_function(self, node):
                             continue
     
                         try:
    -                        supcls = (call.args and call.args[0].infer().next()
    +                        supcls = (call.args and next(call.args[0].infer())
                                       or None)
                         except astroid.InferenceError:
                             continue
     
                         if supcls is None:
    -                        self.add_message('missing-super-argument', node=call)
    +                        self.add_message('missing-super-argument', node=call,
    +                                         confidence=confidence)
                             continue
     
                         if klass is not supcls:
    @@ -143,7 +163,8 @@ def visit_function(self, node):
                             if name is not None:
                                 self.add_message('bad-super-call',
                                                  node=call,
    -                                             args=(name, ))
    +                                             args=(name, ),
    +                                             confidence=confidence)
     
     
     def register(linter):
    diff --git a/pymode/libs/pylint/checkers/python3.py b/pymode/libs/pylint/checkers/python3.py
    new file mode 100644
    index 00000000..837cbef1
    --- /dev/null
    +++ b/pymode/libs/pylint/checkers/python3.py
    @@ -0,0 +1,581 @@
    +# Copyright 2014 Google Inc.
    +# This program is free software; you can redistribute it and/or modify it under
    +# the terms of the GNU General Public License as published by the Free Software
    +# Foundation; either version 2 of the License, or (at your option) any later
    +# version.
    +#
    +# This program is distributed in the hope that it will be useful, but WITHOUT
    +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
    +#
    +# You should have received a copy of the GNU General Public License along with
    +# this program; if not, write to the Free Software Foundation, Inc.,
    +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    +"""Check Python 2 code for Python 2/3 source-compatible issues."""
    +from __future__ import absolute_import, print_function
    +
    +import re
    +import tokenize
    +
    +import astroid
    +from astroid import bases
    +from pylint import checkers, interfaces
    +from pylint.utils import WarningScope
    +from pylint.checkers import utils
    +
    +
    +_ZERO = re.compile("^0+$")
    +
    +def _is_old_octal(literal):
    +    if _ZERO.match(literal):
    +        return False
    +    if re.match('0\d+', literal):
    +        try:
    +            int(literal, 8)
    +        except ValueError:
    +            return False
    +        return True
    +
    +def _check_dict_node(node):
    +    inferred_types = set()
    +    try:
    +        inferred = node.infer()
    +        for inferred_node in inferred:
    +            inferred_types.add(inferred_node)
    +    except (astroid.InferenceError, astroid.UnresolvableName):
    +        pass
    +    return (not inferred_types
    +            or any(isinstance(x, astroid.Dict) for x in inferred_types))
    +
    +def _is_builtin(node):
    +    return getattr(node, 'name', None) in ('__builtin__', 'builtins')
    +
    +_accepts_iterator = {'iter', 'list', 'tuple', 'sorted', 'set', 'sum', 'any',
    +                     'all', 'enumerate', 'dict'}
    +
    +def _in_iterating_context(node):
    +    """Check if the node is being used as an iterator.
    +
    +    Definition is taken from lib2to3.fixer_util.in_special_context().
    +    """
    +    parent = node.parent
    +    # Since a call can't be the loop variant we only need to know if the node's
    +    # parent is a 'for' loop to know it's being used as the iterator for the
    +    # loop.
    +    if isinstance(parent, astroid.For):
    +        return True
    +    # Need to make sure the use of the node is in the iterator part of the
    +    # comprehension.
    +    elif isinstance(parent, astroid.Comprehension):
    +        if parent.iter == node:
    +            return True
    +    # Various built-ins can take in an iterable or list and lead to the same
    +    # value.
    +    elif isinstance(parent, astroid.CallFunc):
    +        if isinstance(parent.func, astroid.Name):
    +            parent_scope = parent.func.lookup(parent.func.name)[0]
    +            if _is_builtin(parent_scope) and parent.func.name in _accepts_iterator:
    +                return True
    +        elif isinstance(parent.func, astroid.Getattr):
    +            if parent.func.attrname == 'join':
    +                return True
    +    # If the call is in an unpacking, there's no need to warn,
    +    # since it can be considered iterating.
    +    elif (isinstance(parent, astroid.Assign) and
    +          isinstance(parent.targets[0], (astroid.List, astroid.Tuple))):
    +        if len(parent.targets[0].elts) > 1:
    +            return True
    +    return False
    +
    +
    +class Python3Checker(checkers.BaseChecker):
    +
    +    __implements__ = interfaces.IAstroidChecker
    +    enabled = False
    +    name = 'python3'
    +
    +    msgs = {
    +        # Errors for what will syntactically break in Python 3, warnings for
    +        # everything else.
    +        'E1601': ('print statement used',
    +                  'print-statement',
    +                  'Used when a print statement is used '
    +                  '(`print` is a function in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'E1602': ('Parameter unpacking specified',
    +                  'parameter-unpacking',
    +                  'Used when parameter unpacking is specified for a function'
    +                  "(Python 3 doesn't allow it)",
    +                  {'maxversion': (3, 0)}),
    +        'E1603': ('Implicit unpacking of exceptions is not supported '
    +                  'in Python 3',
    +                  'unpacking-in-except',
    +                  'Python3 will not allow implicit unpacking of '
    +                  'exceptions in except clauses. '
    +                  'See http://www.python.org/dev/peps/pep-3110/',
    +                  {'maxversion': (3, 0),
    +                   'old_names': [('W0712', 'unpacking-in-except')]}),
    +        'E1604': ('Use raise ErrorClass(args) instead of '
    +                  'raise ErrorClass, args.',
    +                  'old-raise-syntax',
    +                  "Used when the alternate raise syntax "
    +                  "'raise foo, bar' is used "
    +                  "instead of 'raise foo(bar)'.",
    +                  {'maxversion': (3, 0),
    +                   'old_names': [('W0121', 'old-raise-syntax')]}),
    +        'E1605': ('Use of the `` operator',
    +                  'backtick',
    +                  'Used when the deprecated "``" (backtick) operator is used '
    +                  'instead  of the str() function.',
    +                  {'scope': WarningScope.NODE,
    +                   'maxversion': (3, 0),
    +                   'old_names': [('W0333', 'backtick')]}),
    +        'W1601': ('apply built-in referenced',
    +                  'apply-builtin',
    +                  'Used when the apply built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1602': ('basestring built-in referenced',
    +                  'basestring-builtin',
    +                  'Used when the basestring built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1603': ('buffer built-in referenced',
    +                  'buffer-builtin',
    +                  'Used when the buffer built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1604': ('cmp built-in referenced',
    +                  'cmp-builtin',
    +                  'Used when the cmp built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1605': ('coerce built-in referenced',
    +                  'coerce-builtin',
    +                  'Used when the coerce built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1606': ('execfile built-in referenced',
    +                  'execfile-builtin',
    +                  'Used when the execfile built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1607': ('file built-in referenced',
    +                  'file-builtin',
    +                  'Used when the file built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1608': ('long built-in referenced',
    +                  'long-builtin',
    +                  'Used when the long built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1609': ('raw_input built-in referenced',
    +                  'raw_input-builtin',
    +                  'Used when the raw_input built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1610': ('reduce built-in referenced',
    +                  'reduce-builtin',
    +                  'Used when the reduce built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1611': ('StandardError built-in referenced',
    +                  'standarderror-builtin',
    +                  'Used when the StandardError built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1612': ('unicode built-in referenced',
    +                  'unicode-builtin',
    +                  'Used when the unicode built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1613': ('xrange built-in referenced',
    +                  'xrange-builtin',
    +                  'Used when the xrange built-in function is referenced '
    +                  '(missing from Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1614': ('__coerce__ method defined',
    +                  'coerce-method',
    +                  'Used when a __coerce__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1615': ('__delslice__ method defined',
    +                  'delslice-method',
    +                  'Used when a __delslice__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1616': ('__getslice__ method defined',
    +                  'getslice-method',
    +                  'Used when a __getslice__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1617': ('__setslice__ method defined',
    +                  'setslice-method',
    +                  'Used when a __setslice__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1618': ('import missing `from __future__ import absolute_import`',
    +                  'no-absolute-import',
    +                  'Used when an import is not accompanied by '
    +                  '``from __future__ import absolute_import`` '
    +                  '(default behaviour in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1619': ('division w/o __future__ statement',
    +                  'old-division',
    +                  'Used for non-floor division w/o a float literal or '
    +                  '``from __future__ import division`` '
    +                  '(Python 3 returns a float for int division unconditionally)',
    +                  {'maxversion': (3, 0)}),
    +        'W1620': ('Calling a dict.iter*() method',
    +                  'dict-iter-method',
    +                  'Used for calls to dict.iterkeys(), itervalues() or iteritems() '
    +                  '(Python 3 lacks these methods)',
    +                  {'maxversion': (3, 0)}),
    +        'W1621': ('Calling a dict.view*() method',
    +                  'dict-view-method',
    +                  'Used for calls to dict.viewkeys(), viewvalues() or viewitems() '
    +                  '(Python 3 lacks these methods)',
    +                  {'maxversion': (3, 0)}),
    +        'W1622': ('Called a next() method on an object',
    +                  'next-method-called',
    +                  "Used when an object's next() method is called "
    +                  '(Python 3 uses the next() built-in function)',
    +                  {'maxversion': (3, 0)}),
    +        'W1623': ("Assigning to a class' __metaclass__ attribute",
    +                  'metaclass-assignment',
    +                  "Used when a metaclass is specified by assigning to __metaclass__ "
    +                  '(Python 3 specifies the metaclass as a class statement argument)',
    +                  {'maxversion': (3, 0)}),
    +        'W1624': ('Indexing exceptions will not work on Python 3',
    +                  'indexing-exception',
    +                  'Indexing exceptions will not work on Python 3. Use '
    +                  '`exception.args[index]` instead.',
    +                  {'maxversion': (3, 0),
    +                   'old_names': [('W0713', 'indexing-exception')]}),
    +        'W1625': ('Raising a string exception',
    +                  'raising-string',
    +                  'Used when a string exception is raised. This will not '
    +                  'work on Python 3.',
    +                  {'maxversion': (3, 0),
    +                   'old_names': [('W0701', 'raising-string')]}),
    +        'W1626': ('reload built-in referenced',
    +                  'reload-builtin',
    +                  'Used when the reload built-in function is referenced '
    +                  '(missing from Python 3). You can use instead imp.reload '
    +                  'or importlib.reload.',
    +                  {'maxversion': (3, 0)}),
    +        'W1627': ('__oct__ method defined',
    +                  'oct-method',
    +                  'Used when a __oct__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1628': ('__hex__ method defined',
    +                  'hex-method',
    +                  'Used when a __hex__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1629': ('__nonzero__ method defined',
    +                  'nonzero-method',
    +                  'Used when a __nonzero__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1630': ('__cmp__ method defined',
    +                  'cmp-method',
    +                  'Used when a __cmp__ method is defined '
    +                  '(method is not used by Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        # 'W1631': replaced by W1636
    +        'W1632': ('input built-in referenced',
    +                  'input-builtin',
    +                  'Used when the input built-in is referenced '
    +                  '(backwards-incompatible semantics in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1633': ('round built-in referenced',
    +                  'round-builtin',
    +                  'Used when the round built-in is referenced '
    +                  '(backwards-incompatible semantics in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1634': ('intern built-in referenced',
    +                  'intern-builtin',
    +                  'Used when the intern built-in is referenced '
    +                  '(Moved to sys.intern in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1635': ('unichr built-in referenced',
    +                  'unichr-builtin',
    +                  'Used when the unichr built-in is referenced '
    +                  '(Use chr in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1636': ('map built-in referenced when not iterating',
    +                  'map-builtin-not-iterating',
    +                  'Used when the map built-in is referenced in a non-iterating '
    +                  'context (returns an iterator in Python 3)',
    +                  {'maxversion': (3, 0),
    +                   'old_names': [('W1631', 'implicit-map-evaluation')]}),
    +        'W1637': ('zip built-in referenced when not iterating',
    +                  'zip-builtin-not-iterating',
    +                  'Used when the zip built-in is referenced in a non-iterating '
    +                  'context (returns an iterator in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1638': ('range built-in referenced when not iterating',
    +                  'range-builtin-not-iterating',
    +                  'Used when the range built-in is referenced in a non-iterating '
    +                  'context (returns an iterator in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1639': ('filter built-in referenced when not iterating',
    +                  'filter-builtin-not-iterating',
    +                  'Used when the filter built-in is referenced in a non-iterating '
    +                  'context (returns an iterator in Python 3)',
    +                  {'maxversion': (3, 0)}),
    +        'W1640': ('Using the cmp argument for list.sort / sorted',
    +                  'using-cmp-argument',
    +                  'Using the cmp argument for list.sort or the sorted '
    +                  'builtin should be avoided, since it was removed in '
    +                  'Python 3. Using either `key` or `functools.cmp_to_key` '
    +                  'should be preferred.',
    +                  {'maxversion': (3, 0)}),
    +    }
    +
    +    _bad_builtins = frozenset([
    +        'apply',
    +        'basestring',
    +        'buffer',
    +        'cmp',
    +        'coerce',
    +        'execfile',
    +        'file',
    +        'input',  # Not missing, but incompatible semantics
    +        'intern',
    +        'long',
    +        'raw_input',
    +        'reduce',
    +        'round',  # Not missing, but incompatible semantics
    +        'StandardError',
    +        'unichr',
    +        'unicode',
    +        'xrange',
    +        'reload',
    +    ])
    +
    +    _unused_magic_methods = frozenset([
    +        '__coerce__',
    +        '__delslice__',
    +        '__getslice__',
    +        '__setslice__',
    +        '__oct__',
    +        '__hex__',
    +        '__nonzero__',
    +        '__cmp__',
    +    ])
    +
    +    def __init__(self, *args, **kwargs):
    +        self._future_division = False
    +        self._future_absolute_import = False
    +        super(Python3Checker, self).__init__(*args, **kwargs)
    +
    +    def visit_module(self, node): # pylint: disable=unused-argument
    +        """Clear checker state after previous module."""
    +        self._future_division = False
    +        self._future_absolute_import = False
    +
    +    def visit_function(self, node):
    +        if node.is_method() and node.name in self._unused_magic_methods:
    +            method_name = node.name
    +            if node.name.startswith('__'):
    +                method_name = node.name[2:-2]
    +            self.add_message(method_name + '-method', node=node)
    +
    +    @utils.check_messages('parameter-unpacking')
    +    def visit_arguments(self, node):
    +        for arg in node.args:
    +            if isinstance(arg, astroid.Tuple):
    +                self.add_message('parameter-unpacking', node=arg)
    +
    +    def visit_name(self, node):
    +        """Detect when a "bad" built-in is referenced."""
    +        found_node = node.lookup(node.name)[0]
    +        if _is_builtin(found_node):
    +            if node.name in self._bad_builtins:
    +                message = node.name.lower() + '-builtin'
    +                self.add_message(message, node=node)
    +
    +    @utils.check_messages('print-statement')
    +    def visit_print(self, node):
    +        self.add_message('print-statement', node=node)
    +
    +    @utils.check_messages('no-absolute-import')
    +    def visit_from(self, node):
    +        if node.modname == '__future__':
    +            for name, _ in node.names:
    +                if name == 'division':
    +                    self._future_division = True
    +                elif name == 'absolute_import':
    +                    self._future_absolute_import = True
    +        elif not self._future_absolute_import:
    +            self.add_message('no-absolute-import', node=node)
    +
    +    @utils.check_messages('no-absolute-import')
    +    def visit_import(self, node):
    +        if not self._future_absolute_import:
    +            self.add_message('no-absolute-import', node=node)
    +
    +    @utils.check_messages('metaclass-assignment')
    +    def visit_class(self, node):
    +        if '__metaclass__' in node.locals:
    +            self.add_message('metaclass-assignment', node=node)
    +
    +    @utils.check_messages('old-division')
    +    def visit_binop(self, node):
    +        if not self._future_division and node.op == '/':
    +            for arg in (node.left, node.right):
    +                if isinstance(arg, astroid.Const) and isinstance(arg.value, float):
    +                    break
    +            else:
    +                self.add_message('old-division', node=node)
    +
    +    def _check_cmp_argument(self, node):
    +        # Check that the `cmp` argument is used
    +        args = []
    +        if (isinstance(node.func, astroid.Getattr)
    +                and node.func.attrname == 'sort'):
    +            inferred = utils.safe_infer(node.func.expr)
    +            if not inferred:
    +                return
    +
    +            builtins_list = "{}.list".format(bases.BUILTINS)
    +            if (isinstance(inferred, astroid.List)
    +                    or inferred.qname() == builtins_list):
    +                args = node.args
    +
    +        elif (isinstance(node.func, astroid.Name)
    +                and node.func.name == 'sorted'):
    +            inferred = utils.safe_infer(node.func)
    +            if not inferred:
    +                return
    +
    +            builtins_sorted = "{}.sorted".format(bases.BUILTINS)
    +            if inferred.qname() == builtins_sorted:
    +                args = node.args
    +
    +        for arg in args:
    +            if isinstance(arg, astroid.Keyword) and arg.arg == 'cmp':
    +                self.add_message('using-cmp-argument', node=node)
    +                return
    +
    +    def visit_callfunc(self, node):
    +        self._check_cmp_argument(node)
    +
    +        if isinstance(node.func, astroid.Getattr):
    +            if any([node.args, node.starargs, node.kwargs]):
    +                return
    +            if node.func.attrname == 'next':
    +                self.add_message('next-method-called', node=node)
    +            else:
    +                if _check_dict_node(node.func.expr):
    +                    if node.func.attrname in ('iterkeys', 'itervalues', 'iteritems'):
    +                        self.add_message('dict-iter-method', node=node)
    +                    elif node.func.attrname in ('viewkeys', 'viewvalues', 'viewitems'):
    +                        self.add_message('dict-view-method', node=node)
    +        elif isinstance(node.func, astroid.Name):
    +            found_node = node.func.lookup(node.func.name)[0]
    +            if _is_builtin(found_node):
    +                if node.func.name in ('filter', 'map', 'range', 'zip'):
    +                    if not _in_iterating_context(node):
    +                        checker = '{}-builtin-not-iterating'.format(node.func.name)
    +                        self.add_message(checker, node=node)
    +
    +
    +    @utils.check_messages('indexing-exception')
    +    def visit_subscript(self, node):
    +        """ Look for indexing exceptions. """
    +        try:
    +            for infered in node.value.infer():
    +                if not isinstance(infered, astroid.Instance):
    +                    continue
    +                if utils.inherit_from_std_ex(infered):
    +                    self.add_message('indexing-exception', node=node)
    +        except astroid.InferenceError:
    +            return
    +
    +    @utils.check_messages('unpacking-in-except')
    +    def visit_excepthandler(self, node):
    +        """Visit an except handler block and check for exception unpacking."""
    +        if isinstance(node.name, (astroid.Tuple, astroid.List)):
    +            self.add_message('unpacking-in-except', node=node)
    +
    +    @utils.check_messages('backtick')
    +    def visit_backquote(self, node):
    +        self.add_message('backtick', node=node)
    +
    +    @utils.check_messages('raising-string', 'old-raise-syntax')
    +    def visit_raise(self, node):
    +        """Visit a raise statement and check for raising
    +        strings or old-raise-syntax.
    +        """
    +        if (node.exc is not None and
    +                node.inst is not None and
    +                node.tback is None):
    +            self.add_message('old-raise-syntax', node=node)
    +
    +        # Ignore empty raise.
    +        if node.exc is None:
    +            return
    +        expr = node.exc
    +        if self._check_raise_value(node, expr):
    +            return
    +        else:
    +            try:
    +                value = next(astroid.unpack_infer(expr))
    +            except astroid.InferenceError:
    +                return
    +            self._check_raise_value(node, value)
    +
    +    def _check_raise_value(self, node, expr):
    +        if isinstance(expr, astroid.Const):
    +            value = expr.value
    +            if isinstance(value, str):
    +                self.add_message('raising-string', node=node)
    +                return True
    +
    +
    +class Python3TokenChecker(checkers.BaseTokenChecker):
    +    __implements__ = interfaces.ITokenChecker
    +    name = 'python3'
    +    enabled = False
    +
    +    msgs = {
    +        'E1606': ('Use of long suffix',
    +                  'long-suffix',
    +                  'Used when "l" or "L" is used to mark a long integer. '
    +                  'This will not work in Python 3, since `int` and `long` '
    +                  'types have merged.',
    +                  {'maxversion': (3, 0)}),
    +        'E1607': ('Use of the <> operator',
    +                  'old-ne-operator',
    +                  'Used when the deprecated "<>" operator is used instead '
    +                  'of "!=". This is removed in Python 3.',
    +                  {'maxversion': (3, 0),
    +                   'old_names': [('W0331', 'old-ne-operator')]}),
    +        'E1608': ('Use of old octal literal',
    +                  'old-octal-literal',
    +                  'Used when encountering the old octal syntax, '
    +                  'removed in Python 3. To use the new syntax, '
    +                  'prepend 0o on the number.',
    +                  {'maxversion': (3, 0)}),
    +    }
    +
    +    def process_tokens(self, tokens):
    +        for idx, (tok_type, token, start, _, _) in enumerate(tokens):
    +            if tok_type == tokenize.NUMBER:
    +                if token.lower().endswith('l'):
    +                    # This has a different semantic than lowercase-l-suffix.
    +                    self.add_message('long-suffix', line=start[0])
    +                elif _is_old_octal(token):
    +                    self.add_message('old-octal-literal', line=start[0])
    +            if tokens[idx][1] == '<>':
    +                self.add_message('old-ne-operator', line=tokens[idx][2][0])
    +
    +
    +def register(linter):
    +    linter.register_checker(Python3Checker(linter))
    +    linter.register_checker(Python3TokenChecker(linter))
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py b/pymode/libs/pylint/checkers/raw_metrics.py
    similarity index 100%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/raw_metrics.py
    rename to pymode/libs/pylint/checkers/raw_metrics.py
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py b/pymode/libs/pylint/checkers/similar.py
    similarity index 91%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py
    rename to pymode/libs/pylint/checkers/similar.py
    index cf671bf6..95420776 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/similar.py
    +++ b/pymode/libs/pylint/checkers/similar.py
    @@ -16,14 +16,18 @@
     # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
     """a similarities / code duplication command line tool and pylint checker
     """
    +from __future__ import print_function
     import sys
    -from itertools import izip
    +from collections import defaultdict
     
     from logilab.common.ureports import Table
     
     from pylint.interfaces import IRawChecker
     from pylint.checkers import BaseChecker, table_lines_from_stats
     
    +import six
    +from six.moves import zip
    +
     
     class Similar(object):
         """finds copy-pasted lines of code in a project"""
    @@ -38,7 +42,6 @@ def __init__(self, min_lines=4, ignore_comments=False,
     
         def append_stream(self, streamid, stream, encoding=None):
             """append a file to search for similarities"""
    -        stream.seek(0) # XXX may be removed with astroid > 0.23
             if encoding is None:
                 readlines = stream.readlines
             else:
    @@ -58,9 +61,9 @@ def run(self):
     
         def _compute_sims(self):
             """compute similarities in appended files"""
    -        no_duplicates = {}
    +        no_duplicates = defaultdict(list)
             for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
    -            duplicate = no_duplicates.setdefault(num, [])
    +            duplicate = no_duplicates[num]
                 for couples in duplicate:
                     if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                         couples.add((lineset1, idx1))
    @@ -69,7 +72,7 @@ def _compute_sims(self):
                 else:
                     duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
             sims = []
    -        for num, ensembles in no_duplicates.iteritems():
    +        for num, ensembles in six.iteritems(no_duplicates):
                 for couples in ensembles:
                     sims.append((num, couples))
             sims.sort()
    @@ -80,19 +83,19 @@ def _display_sims(self, sims):
             """display computed similarities on stdout"""
             nb_lignes_dupliquees = 0
             for num, couples in sims:
    -            print
    -            print num, "similar lines in", len(couples), "files"
    +            print()
    +            print(num, "similar lines in", len(couples), "files")
                 couples = sorted(couples)
                 for lineset, idx in couples:
    -                print "==%s:%s" % (lineset.name, idx)
    +                print("==%s:%s" % (lineset.name, idx))
                 # pylint: disable=W0631
                 for line in lineset._real_lines[idx:idx+num]:
    -                print "  ", line.rstrip()
    +                print("  ", line.rstrip())
                 nb_lignes_dupliquees += num * (len(couples)-1)
             nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
    -        print "TOTAL lines=%s duplicates=%s percent=%.2f" \
    +        print("TOTAL lines=%s duplicates=%s percent=%.2f" \
                 % (nb_total_lignes, nb_lignes_dupliquees,
    -               nb_lignes_dupliquees*100. / nb_total_lignes)
    +               nb_lignes_dupliquees*100. / nb_total_lignes))
     
         def _find_common(self, lineset1, lineset2):
             """find similarities in the two given linesets"""
    @@ -107,7 +110,7 @@ def _find_common(self, lineset1, lineset2):
                 for index2 in find(lineset1[index1]):
                     non_blank = 0
                     for num, ((_, line1), (_, line2)) in enumerate(
    -                    izip(lines1(index1), lines2(index2))):
    +                        zip(lines1(index1), lines2(index2))):
                         if line1 != line2:
                             if non_blank > min_lines:
                                 yield num, lineset1, index1, lineset2, index2
    @@ -207,10 +210,10 @@ def find(self, stripped_line):
     
         def _mk_index(self):
             """create the index for this set"""
    -        index = {}
    +        index = defaultdict(list)
             for line_no, line in enumerate(self._stripped_lines):
                 if line:
    -                index.setdefault(line, []).append(line_no)
    +                index[line].append(line_no)
             return index
     
     
    @@ -249,16 +252,16 @@ class SimilarChecker(BaseChecker, Similar):
                    ('ignore-comments',
                     {'default' : True, 'type' : 'yn', 'metavar' : '',
                      'help': 'Ignore comments when computing similarities.'}
    -                ),
    +               ),
                    ('ignore-docstrings',
                     {'default' : True, 'type' : 'yn', 'metavar' : '',
                      'help': 'Ignore docstrings when computing similarities.'}
    -                ),
    +               ),
                    ('ignore-imports',
                     {'default' : False, 'type' : 'yn', 'metavar' : '',
                      'help': 'Ignore imports when computing similarities.'}
    -                ),
    -               )
    +               ),
    +              )
         # reports
         reports = (('RP0801', 'Duplication', report_similarities),)
     
    @@ -296,7 +299,10 @@ def process_module(self, node):
     
             stream must implement the readlines method
             """
    -        self.append_stream(self.linter.current_name, node.file_stream, node.file_encoding)
    +        with node.stream() as stream:
    +            self.append_stream(self.linter.current_name,
    +                               stream,
    +                               node.file_encoding)
     
         def close(self):
             """compute and display similarities on closing (i.e. end of parsing)"""
    @@ -323,10 +329,10 @@ def register(linter):
     
     def usage(status=0):
         """display command line usage information"""
    -    print "finds copy pasted blocks in a set of files"
    -    print
    -    print 'Usage: symilar [-d|--duplicates min_duplicated_lines] \
    -[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...'
    +    print("finds copy pasted blocks in a set of files")
    +    print()
    +    print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
    +[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
         sys.exit(status)
     
     def Run(argv=None):
    @@ -357,7 +363,8 @@ def Run(argv=None):
             usage(1)
         sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
         for filename in args:
    -        sim.append_stream(filename, open(filename))
    +        with open(filename) as stream:
    +            sim.append_stream(filename, stream)
         sim.run()
         sys.exit(0)
     
    diff --git a/pymode/libs/pylint/checkers/spelling.py b/pymode/libs/pylint/checkers/spelling.py
    new file mode 100644
    index 00000000..f6edd5db
    --- /dev/null
    +++ b/pymode/libs/pylint/checkers/spelling.py
    @@ -0,0 +1,250 @@
    +# Copyright 2014 Michal Nowikowski.
    +#
    +# This program is free software; you can redistribute it and/or modify it under
    +# the terms of the GNU General Public License as published by the Free Software
    +# Foundation; either version 2 of the License, or (at your option) any later
    +# version.
    +#
    +# This program is distributed in the hope that it will be useful, but WITHOUT
    +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
    +#
    +# You should have received a copy of the GNU General Public License along with
    +# this program; if not, write to the Free Software Foundation, Inc.,
    +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    +"""Checker for spelling errors in comments and docstrings.
    +"""
    +
    +import sys
    +import tokenize
    +import string
    +import re
    +
    +if sys.version_info[0] >= 3:
    +    maketrans = str.maketrans
    +else:
    +    maketrans = string.maketrans
    +
    +from pylint.interfaces import ITokenChecker, IAstroidChecker
    +from pylint.checkers import BaseTokenChecker
    +from pylint.checkers.utils import check_messages
    +
    +try:
    +    import enchant
    +except ImportError:
    +    enchant = None
    +
    +if enchant is not None:
    +    br = enchant.Broker()
    +    dicts = br.list_dicts()
    +    dict_choices = [''] + [d[0] for d in dicts]
    +    dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
    +    dicts = ", ".join(dicts)
    +    instr = ""
    +else:
    +    dicts = "none"
    +    dict_choices = ['']
    +    instr = " To make it working install python-enchant package."
    +
    +table = maketrans("", "")
    +
    +class SpellingChecker(BaseTokenChecker):
    +    """Check spelling in comments and docstrings"""
    +    __implements__ = (ITokenChecker, IAstroidChecker)
    +    name = 'spelling'
    +    msgs = {
    +        'C0401': ('Wrong spelling of a word \'%s\' in a comment:\n%s\n'
    +                  '%s\nDid you mean: \'%s\'?',
    +                  'wrong-spelling-in-comment',
    +                  'Used when a word in comment is not spelled correctly.'),
    +        'C0402': ('Wrong spelling of a word \'%s\' in a docstring:\n%s\n'
    +                  '%s\nDid you mean: \'%s\'?',
    +                  'wrong-spelling-in-docstring',
    +                  'Used when a word in docstring is not spelled correctly.'),
    +        'C0403': ('Invalid characters %r in a docstring',
    +                  'invalid-characters-in-docstring',
    +                  'Used when a word in docstring cannot be checked by enchant.'),
    +        }
    +    options = (('spelling-dict',
    +                {'default' : '', 'type' : 'choice', 'metavar' : '',
    +                 'choices': dict_choices,
    +                 'help' : 'Spelling dictionary name. '
    +                          'Available dictionaries: %s.%s' % (dicts, instr)}),
    +               ('spelling-ignore-words',
    +                {'default' : '',
    +                 'type' : 'string',
    +                 'metavar' : '',
    +                 'help' : 'List of comma separated words that '
    +                          'should not be checked.'}),
    +               ('spelling-private-dict-file',
    +                {'default' : '',
    +                 'type' : 'string',
    +                 'metavar' : '',
    +                 'help' : 'A path to a file that contains private '
    +                          'dictionary; one word per line.'}),
    +               ('spelling-store-unknown-words',
    +                {'default' : 'n', 'type' : 'yn', 'metavar' : '',
    +                 'help' : 'Tells whether to store unknown words to '
    +                          'indicated private dictionary in '
    +                          '--spelling-private-dict-file option instead of '
    +                          'raising a message.'}),
    +              )
    +
    +    def open(self):
    +        self.initialized = False
    +        self.private_dict_file = None
    +
    +        if enchant is None:
    +            return
    +        dict_name = self.config.spelling_dict
    +        if not dict_name:
    +            return
    +
    +        self.ignore_list = [w.strip() for w in self.config.spelling_ignore_words.split(",")]
    +        # "param" appears in docstring in param description and
    +        # "pylint" appears in comments in pylint pragmas.
    +        self.ignore_list.extend(["param", "pylint"])
    +
    +        if self.config.spelling_private_dict_file:
    +            self.spelling_dict = enchant.DictWithPWL(
    +                dict_name, self.config.spelling_private_dict_file)
    +            self.private_dict_file = open(
    +                self.config.spelling_private_dict_file, "a")
    +        else:
    +            self.spelling_dict = enchant.Dict(dict_name)
    +
    +        if self.config.spelling_store_unknown_words:
    +            self.unknown_words = set()
    +
    +        # Prepare regex for stripping punctuation signs from text.
    +        # ' and _ are treated in a special way.
    +        puncts = string.punctuation.replace("'", "").replace("_", "")
    +        self.punctuation_regex = re.compile('[%s]' % re.escape(puncts))
    +        self.initialized = True
    +
    +    def close(self):
    +        if self.private_dict_file:
    +            self.private_dict_file.close()
    +
    +    def _check_spelling(self, msgid, line, line_num):
    +        line2 = line.strip()
    +        # Replace ['afadf with afadf (but preserve don't)
    +        line2 = re.sub("'([^a-zA-Z]|$)", " ", line2)
    +        # Replace afadf'] with afadf (but preserve don't)
    +        line2 = re.sub("([^a-zA-Z]|^)'", " ", line2)
    +        # Replace punctuation signs with space e.g. and/or -> and or
    +        line2 = self.punctuation_regex.sub(' ', line2)
    +
    +        words = []
    +        for word in line2.split():
    +            # Skip words with digits.
    +            if len(re.findall(r"\d", word)) > 0:
    +                continue
    +
    +            # Skip words with mixed big and small letters,
     +            # they are probably class names.
    +            if (len(re.findall("[A-Z]", word)) > 0 and
    +                    len(re.findall("[a-z]", word)) > 0 and
    +                    len(word) > 2):
    +                continue
    +
    +            # Skip words with _ - they are probably function parameter names.
    +            if word.count('_') > 0:
    +                continue
    +
    +            words.append(word)
    +
    +        # Go through words and check them.
    +        for word in words:
    +            # Skip words from ignore list.
    +            if word in self.ignore_list:
    +                continue
    +
    +            orig_word = word
    +            word = word.lower()
    +
    +            # Strip starting u' from unicode literals and r' from raw strings.
    +            if (word.startswith("u'") or
    +                    word.startswith('u"') or
    +                    word.startswith("r'") or
    +                    word.startswith('r"')) and len(word) > 2:
    +                word = word[2:]
    +
    +            # If it is a known word, then continue.
    +            try:
    +                if self.spelling_dict.check(word):
    +                    continue
    +            except enchant.errors.Error:
    +                # this can only happen in docstrings, not comments
    +                self.add_message('invalid-characters-in-docstring',
    +                                 line=line_num, args=(word,))
    +                continue
    +
    +            # Store word to private dict or raise a message.
    +            if self.config.spelling_store_unknown_words:
    +                if word not in self.unknown_words:
    +                    self.private_dict_file.write("%s\n" % word)
    +                    self.unknown_words.add(word)
    +            else:
    +                # Present up to 4 suggestions.
    +                # TODO: add support for customising this.
    +                suggestions = self.spelling_dict.suggest(word)[:4]
    +
    +                m = re.search(r"(\W|^)(%s)(\W|$)" % word, line.lower())
    +                if m:
    +                    # Start position of second group in regex.
    +                    col = m.regs[2][0]
    +                else:
    +                    col = line.lower().index(word)
    +                indicator = (" " * col) + ("^" * len(word))
    +
    +                self.add_message(msgid, line=line_num,
    +                                 args=(orig_word, line,
    +                                       indicator,
    +                                       "' or '".join(suggestions)))
    +
    +    def process_tokens(self, tokens):
    +        if not self.initialized:
    +            return
    +
    +        # Process tokens and look for comments.
    +        for (tok_type, token, (start_row, _), _, _) in tokens:
    +            if tok_type == tokenize.COMMENT:
    +                self._check_spelling('wrong-spelling-in-comment',
    +                                     token, start_row)
    +
    +    @check_messages('wrong-spelling-in-docstring')
    +    def visit_module(self, node):
    +        if not self.initialized:
    +            return
    +        self._check_docstring(node)
    +
    +    @check_messages('wrong-spelling-in-docstring')
    +    def visit_class(self, node):
    +        if not self.initialized:
    +            return
    +        self._check_docstring(node)
    +
    +    @check_messages('wrong-spelling-in-docstring')
    +    def visit_function(self, node):
    +        if not self.initialized:
    +            return
    +        self._check_docstring(node)
    +
    +    def _check_docstring(self, node):
    +        """check the node has any spelling errors"""
    +        docstring = node.doc
    +        if not docstring:
    +            return
    +
    +        start_line = node.lineno + 1
    +
    +        # Go through lines of docstring
    +        for idx, line in enumerate(docstring.splitlines()):
    +            self._check_spelling('wrong-spelling-in-docstring',
    +                                 line, start_line + idx)
    +
    +
    +def register(linter):
    +    """required method to auto register this checker """
    +    linter.register_checker(SpellingChecker(linter))
    diff --git a/pymode/libs/pylint/checkers/stdlib.py b/pymode/libs/pylint/checkers/stdlib.py
    new file mode 100644
    index 00000000..a3a61063
    --- /dev/null
    +++ b/pymode/libs/pylint/checkers/stdlib.py
    @@ -0,0 +1,216 @@
    +# Copyright 2012 Google Inc.
    +#
    +# http://www.logilab.fr/ -- mailto:contact@logilab.fr
    +# This program is free software; you can redistribute it and/or modify it under
    +# the terms of the GNU General Public License as published by the Free Software
    +# Foundation; either version 2 of the License, or (at your option) any later
    +# version.
    +#
    +# This program is distributed in the hope that it will be useful, but WITHOUT
    +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
    +#
    +# You should have received a copy of the GNU General Public License along with
    +# this program; if not, write to the Free Software Foundation, Inc.,
    +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    +"""Checkers for various standard library functions."""
    +
    +import six
    +import sys
    +
    +import astroid
    +from astroid.bases import Instance
    +
    +from pylint.interfaces import IAstroidChecker
    +from pylint.checkers import BaseChecker
    +from pylint.checkers import utils
    +
    +
    +TYPECHECK_COMPARISON_OPERATORS = frozenset(('is', 'is not', '==', '!=', 'in', 'not in'))
    +LITERAL_NODE_TYPES = (astroid.Const, astroid.Dict, astroid.List, astroid.Set)
    +
    +if sys.version_info >= (3, 0):
    +    OPEN_MODULE = '_io'
    +    TYPE_QNAME = 'builtins.type'
    +else:
    +    OPEN_MODULE = '__builtin__'
    +    TYPE_QNAME = '__builtin__.type'
    +
    +
    +def _check_mode_str(mode):
    +    # check type
    +    if not isinstance(mode, six.string_types):
    +        return False
    +    # check syntax
    +    modes = set(mode)
    +    _mode = "rwatb+U"
    +    creating = False
    +    if six.PY3:
    +        _mode += "x"
    +        creating = "x" in modes
    +    if modes - set(_mode) or len(mode) > len(modes):
    +        return False
    +    # check logic
    +    reading = "r" in modes
    +    writing = "w" in modes
    +    appending = "a" in modes
    +    text = "t" in modes
    +    binary = "b" in modes
    +    if "U" in modes:
    +        if writing or appending or creating and six.PY3:
    +            return False
    +        reading = True
    +        if not six.PY3:
    +            binary = True
    +    if text and binary:
    +        return False
    +    total = reading + writing + appending + (creating if six.PY3 else 0)
    +    if total > 1:
    +        return False
    +    if not (reading or writing or appending or creating and six.PY3):
    +        return False
    +    # other 2.x constraints
    +    if not six.PY3:
    +        if "U" in mode:
    +            mode = mode.replace("U", "")
    +            if "r" not in mode:
    +                mode = "r" + mode
    +        return mode[0] in ("r", "w", "a", "U")
    +    return True
    +
    +
    +def _is_one_arg_pos_call(call):
    +    """Is this a call with exactly 1 argument,
    +    where that argument is positional?
    +    """
    +    return (isinstance(call, astroid.CallFunc)
    +            and len(call.args) == 1
    +            and not isinstance(call.args[0], astroid.Keyword))
    +
    +
    +class StdlibChecker(BaseChecker):
    +    __implements__ = (IAstroidChecker,)
    +    name = 'stdlib'
    +
    +    msgs = {
    +        'W1501': ('"%s" is not a valid mode for open.',
    +                  'bad-open-mode',
    +                  'Python supports: r, w, a[, x] modes with b, +, '
    +                  'and U (only with r) options. '
    +                  'See http://docs.python.org/2/library/functions.html#open'),
    +        'W1502': ('Using datetime.time in a boolean context.',
    +                  'boolean-datetime',
    +                  'Using datetime.time in a boolean context can hide '
    +                  'subtle bugs when the time they represent matches '
    +                  'midnight UTC. This behaviour was fixed in Python 3.5. '
    +                  'See http://bugs.python.org/issue13936 for reference.',
    +                  {'maxversion': (3, 5)}),
    +        'W1503': ('Redundant use of %s with constant '
    +                  'value %r',
    +                  'redundant-unittest-assert',
    +                  'The first argument of assertTrue and assertFalse is '
    +                  'a condition. If a constant is passed as parameter, that '
    +                  'condition will be always true. In this case a warning '
    +                  'should be emitted.'),
    +        'W1504': ('Using type() instead of isinstance() for a typecheck.',
    +                  'unidiomatic-typecheck',
    +                  'The idiomatic way to perform an explicit typecheck in '
    +                  'Python is to use isinstance(x, Y) rather than '
    +                  'type(x) == Y, type(x) is Y. Though there are unusual '
    +                  'situations where these give different results.')
    +    }
    +
    +    @utils.check_messages('bad-open-mode', 'redundant-unittest-assert')
    +    def visit_callfunc(self, node):
    +        """Visit a CallFunc node."""
    +        if hasattr(node, 'func'):
    +            infer = utils.safe_infer(node.func)
    +            if infer:
    +                if infer.root().name == OPEN_MODULE:
    +                    if getattr(node.func, 'name', None) in ('open', 'file'):
    +                        self._check_open_mode(node)
    +                if infer.root().name == 'unittest.case':
    +                    self._check_redundant_assert(node, infer)
    +
    +    @utils.check_messages('boolean-datetime')
    +    def visit_unaryop(self, node):
    +        if node.op == 'not':
    +            self._check_datetime(node.operand)
    +
    +    @utils.check_messages('boolean-datetime')
    +    def visit_if(self, node):
    +        self._check_datetime(node.test)
    +
    +    @utils.check_messages('boolean-datetime')
    +    def visit_ifexp(self, node):
    +        self._check_datetime(node.test)
    +
    +    @utils.check_messages('boolean-datetime')
    +    def visit_boolop(self, node):
    +        for value in node.values:
    +            self._check_datetime(value)
    +
    +    @utils.check_messages('unidiomatic-typecheck')
    +    def visit_compare(self, node):
    +        operator, right = node.ops[0]
    +        if operator in TYPECHECK_COMPARISON_OPERATORS:
    +            left = node.left
    +            if _is_one_arg_pos_call(left):
    +                self._check_type_x_is_y(node, left, operator, right)
    +
    +    def _check_redundant_assert(self, node, infer):
    +        if (isinstance(infer, astroid.BoundMethod) and
    +                node.args and isinstance(node.args[0], astroid.Const) and
    +                infer.name in ['assertTrue', 'assertFalse']):
    +            self.add_message('redundant-unittest-assert',
    +                             args=(infer.name, node.args[0].value, ),
    +                             node=node)
    +
    +    def _check_datetime(self, node):
     +        """ Check that a datetime was inferred.
    +        If so, emit boolean-datetime warning.
    +        """
    +        try:
    +            infered = next(node.infer())
    +        except astroid.InferenceError:
    +            return
    +        if (isinstance(infered, Instance) and
    +                infered.qname() == 'datetime.time'):
    +            self.add_message('boolean-datetime', node=node)
    +
    +    def _check_open_mode(self, node):
    +        """Check that the mode argument of an open or file call is valid."""
    +        try:
    +            mode_arg = utils.get_argument_from_call(node, position=1,
    +                                                    keyword='mode')
    +        except utils.NoSuchArgumentError:
    +            return
    +        if mode_arg:
    +            mode_arg = utils.safe_infer(mode_arg)
    +            if (isinstance(mode_arg, astroid.Const)
    +                    and not _check_mode_str(mode_arg.value)):
    +                self.add_message('bad-open-mode', node=node,
    +                                 args=mode_arg.value)
    +
    +    def _check_type_x_is_y(self, node, left, operator, right):
    +        """Check for expressions like type(x) == Y."""
    +        left_func = utils.safe_infer(left.func)
    +        if not (isinstance(left_func, astroid.Class)
    +                and left_func.qname() == TYPE_QNAME):
    +            return
    +
    +        if operator in ('is', 'is not') and _is_one_arg_pos_call(right):
    +            right_func = utils.safe_infer(right.func)
    +            if (isinstance(right_func, astroid.Class)
    +                    and right_func.qname() == TYPE_QNAME):
    +                # type(x) == type(a)
    +                right_arg = utils.safe_infer(right.args[0])
    +                if not isinstance(right_arg, LITERAL_NODE_TYPES):
    +                    # not e.g. type(x) == type([])
    +                    return
    +        self.add_message('unidiomatic-typecheck', node=node)
    +
    +
    +def register(linter):
    +    """required method to auto register this checker """
    +    linter.register_checker(StdlibChecker(linter))
    diff --git a/pymode/libs/pylint/checkers/strings.py b/pymode/libs/pylint/checkers/strings.py
    new file mode 100644
    index 00000000..8892c2cc
    --- /dev/null
    +++ b/pymode/libs/pylint/checkers/strings.py
    @@ -0,0 +1,615 @@
    +# Copyright (c) 2009-2010 Arista Networks, Inc. - James Lingard
    +# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
    +# Copyright 2012 Google Inc.
    +#
    +# http://www.logilab.fr/ -- mailto:contact@logilab.fr
    +# This program is free software; you can redistribute it and/or modify it under
    +# the terms of the GNU General Public License as published by the Free Software
    +# Foundation; either version 2 of the License, or (at your option) any later
    +# version.
    +#
    +# This program is distributed in the hope that it will be useful, but WITHOUT
    +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
    +#
    +# You should have received a copy of the GNU General Public License along with
    +# this program; if not, write to the Free Software Foundation, Inc.,
    +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    +"""Checker for string formatting operations.
    +"""
    +
    +import sys
    +import tokenize
    +import string
    +import numbers
    +
    +import astroid
    +
    +from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
    +from pylint.checkers import BaseChecker, BaseTokenChecker
    +from pylint.checkers import utils
    +from pylint.checkers.utils import check_messages
    +
    +import six
    +
    +
    +_PY3K = sys.version_info[:2] >= (3, 0)
    +_PY27 = sys.version_info[:2] == (2, 7)
    +
    +MSGS = {
    +    'E1300': ("Unsupported format character %r (%#02x) at index %d",
    +              "bad-format-character",
     +              "Used when an unsupported format character is used in a format\
    +              string."),
    +    'E1301': ("Format string ends in middle of conversion specifier",
    +              "truncated-format-string",
    +              "Used when a format string terminates before the end of a \
    +              conversion specifier."),
    +    'E1302': ("Mixing named and unnamed conversion specifiers in format string",
    +              "mixed-format-string",
    +              "Used when a format string contains both named (e.g. '%(foo)d') \
    +              and unnamed (e.g. '%d') conversion specifiers.  This is also \
    +              used when a named conversion specifier contains * for the \
    +              minimum field width and/or precision."),
    +    'E1303': ("Expected mapping for format string, not %s",
    +              "format-needs-mapping",
    +              "Used when a format string that uses named conversion specifiers \
    +              is used with an argument that is not a mapping."),
    +    'W1300': ("Format string dictionary key should be a string, not %s",
    +              "bad-format-string-key",
    +              "Used when a format string that uses named conversion specifiers \
    +              is used with a dictionary whose keys are not all strings."),
    +    'W1301': ("Unused key %r in format string dictionary",
    +              "unused-format-string-key",
    +              "Used when a format string that uses named conversion specifiers \
     +              is used with a dictionary that contains keys not required by the \
    +              format string."),
    +    'E1304': ("Missing key %r in format string dictionary",
    +              "missing-format-string-key",
    +              "Used when a format string that uses named conversion specifiers \
    +              is used with a dictionary that doesn't contain all the keys \
    +              required by the format string."),
    +    'E1305': ("Too many arguments for format string",
    +              "too-many-format-args",
    +              "Used when a format string that uses unnamed conversion \
    +              specifiers is given too many arguments."),
    +    'E1306': ("Not enough arguments for format string",
    +              "too-few-format-args",
    +              "Used when a format string that uses unnamed conversion \
    +              specifiers is given too few arguments"),
    +
    +    'W1302': ("Invalid format string",
    +              "bad-format-string",
    +              "Used when a PEP 3101 format string is invalid.",
    +              {'minversion': (2, 7)}),
    +    'W1303': ("Missing keyword argument %r for format string",
    +              "missing-format-argument-key",
    +              "Used when a PEP 3101 format string that uses named fields "
    +              "doesn't receive one or more required keywords.",
    +              {'minversion': (2, 7)}),
    +    'W1304': ("Unused format argument %r",
    +              "unused-format-string-argument",
    +              "Used when a PEP 3101 format string that uses named "
    +              "fields is used with an argument that "
    +              "is not required by the format string.",
    +              {'minversion': (2, 7)}),
    +    'W1305': ("Format string contains both automatic field numbering "
    +              "and manual field specification",
    +              "format-combined-specification",
     +              "Used when a PEP 3101 format string contains both automatic "
    +              "field numbering (e.g. '{}') and manual field "
    +              "specification (e.g. '{0}').",
    +              {'minversion': (2, 7)}),
    +    'W1306': ("Missing format attribute %r in format specifier %r",
    +              "missing-format-attribute",
    +              "Used when a PEP 3101 format string uses an "
    +              "attribute specifier ({0.length}), but the argument "
    +              "passed for formatting doesn't have that attribute.",
    +              {'minversion': (2, 7)}),
    +    'W1307': ("Using invalid lookup key %r in format specifier %r",
    +              "invalid-format-index",
    +              "Used when a PEP 3101 format string uses a lookup specifier "
    +              "({a[1]}), but the argument passed for formatting "
    +              "doesn't contain or doesn't have that key as an attribute.",
    +              {'minversion': (2, 7)})
    +    }
    +
    +OTHER_NODES = (astroid.Const, astroid.List, astroid.Backquote,
    +               astroid.Lambda, astroid.Function,
    +               astroid.ListComp, astroid.SetComp, astroid.GenExpr)
    +
    +if _PY3K:
    +    import _string
    +
    +    def split_format_field_names(format_string):
    +        return _string.formatter_field_name_split(format_string)
    +else:
    +    def _field_iterator_convertor(iterator):
    +        for is_attr, key in iterator:
    +            if isinstance(key, numbers.Number):
    +                yield is_attr, int(key)
    +            else:
    +                yield is_attr, key
    +
    +    def split_format_field_names(format_string):
    +        keyname, fielditerator = format_string._formatter_field_name_split()
    +        # it will return longs, instead of ints, which will complicate
    +        # the output
    +        return keyname, _field_iterator_convertor(fielditerator)
    +
    +
    +def collect_string_fields(format_string):
    +    """ Given a format string, return an iterator
    +    of all the valid format fields. It handles nested fields
    +    as well.
    +    """
    +
    +    formatter = string.Formatter()
    +    try:
    +        parseiterator = formatter.parse(format_string)
    +        for result in parseiterator:
    +            if all(item is None for item in result[1:]):
    +                # not a replacement format
    +                continue
    +            name = result[1]
    +            nested = result[2]
    +            yield name
    +            if nested:
    +                for field in collect_string_fields(nested):
    +                    yield field
    +    except ValueError:
    +        # probably the format string is invalid
    +        # should we check the argument of the ValueError?
    +        raise utils.IncompleteFormatString(format_string)
    +
    +def parse_format_method_string(format_string):
    +    """
    +    Parses a PEP 3101 format string, returning a tuple of
    +    (keys, num_args, manual_pos_arg),
    +    where keys is the set of mapping keys in the format string, num_args
    +    is the number of arguments required by the format string and
    +    manual_pos_arg is the number of arguments passed with the position.
    +    """
    +    keys = []
    +    num_args = 0
    +    manual_pos_arg = set()
    +    for name in collect_string_fields(format_string):
    +        if name and str(name).isdigit():
    +            manual_pos_arg.add(str(name))
    +        elif name:
    +            keyname, fielditerator = split_format_field_names(name)
    +            if isinstance(keyname, numbers.Number):
    +                # In Python 2 it will return long which will lead
    +                # to different output between 2 and 3
    +                manual_pos_arg.add(str(keyname))
    +                keyname = int(keyname)
    +            keys.append((keyname, list(fielditerator)))
    +        else:
    +            num_args += 1
    +    return keys, num_args, len(manual_pos_arg)
    +
    +def get_args(callfunc):
    +    """ Get the arguments from the given `CallFunc` node.
    +    Return a tuple, where the first element is the
    +    number of positional arguments and the second element
    +    is the keyword arguments in a dict.
    +    """
    +    positional = 0
    +    named = {}
    +
    +    for arg in callfunc.args:
    +        if isinstance(arg, astroid.Keyword):
    +            named[arg.arg] = utils.safe_infer(arg.value)
    +        else:
    +            positional += 1
    +    return positional, named
    +
    +def get_access_path(key, parts):
    +    """ Given a list of format specifiers, returns
    +    the final access path (e.g. a.b.c[0][1]).
    +    """
    +    path = []
    +    for is_attribute, specifier in parts:
    +        if is_attribute:
    +            path.append(".{}".format(specifier))
    +        else:
    +            path.append("[{!r}]".format(specifier))
    +    return str(key) + "".join(path)
    +
    +
    +class StringFormatChecker(BaseChecker):
    +    """Checks string formatting operations to ensure that the format string
    +    is valid and the arguments match the format string.
    +    """
    +
    +    __implements__ = (IAstroidChecker,)
    +    name = 'string'
    +    msgs = MSGS
    +
    +    @check_messages(*(MSGS.keys()))
    +    def visit_binop(self, node):
    +        if node.op != '%':
    +            return
    +        left = node.left
    +        args = node.right
    +
    +        if not (isinstance(left, astroid.Const)
    +                and isinstance(left.value, six.string_types)):
    +            return
    +        format_string = left.value
    +        try:
    +            required_keys, required_num_args = \
    +                utils.parse_format_string(format_string)
    +        except utils.UnsupportedFormatCharacter as e:
    +            c = format_string[e.index]
    +            self.add_message('bad-format-character',
    +                             node=node, args=(c, ord(c), e.index))
    +            return
    +        except utils.IncompleteFormatString:
    +            self.add_message('truncated-format-string', node=node)
    +            return
    +        if required_keys and required_num_args:
    +            # The format string uses both named and unnamed format
    +            # specifiers.
    +            self.add_message('mixed-format-string', node=node)
    +        elif required_keys:
    +            # The format string uses only named format specifiers.
    +            # Check that the RHS of the % operator is a mapping object
    +            # that contains precisely the set of keys required by the
    +            # format string.
    +            if isinstance(args, astroid.Dict):
    +                keys = set()
    +                unknown_keys = False
    +                for k, _ in args.items:
    +                    if isinstance(k, astroid.Const):
    +                        key = k.value
    +                        if isinstance(key, six.string_types):
    +                            keys.add(key)
    +                        else:
    +                            self.add_message('bad-format-string-key',
    +                                             node=node, args=key)
    +                    else:
    +                        # One of the keys was something other than a
    +                        # constant.  Since we can't tell what it is,
    +                        # supress checks for missing keys in the
    +                        # dictionary.
    +                        unknown_keys = True
    +                if not unknown_keys:
    +                    for key in required_keys:
    +                        if key not in keys:
    +                            self.add_message('missing-format-string-key',
    +                                             node=node, args=key)
    +                for key in keys:
    +                    if key not in required_keys:
    +                        self.add_message('unused-format-string-key',
    +                                         node=node, args=key)
    +            elif isinstance(args, OTHER_NODES + (astroid.Tuple,)):
    +                type_name = type(args).__name__
    +                self.add_message('format-needs-mapping',
    +                                 node=node, args=type_name)
    +            # else:
    +                # The RHS of the format specifier is a name or
    +                # expression.  It may be a mapping object, so
    +                # there's nothing we can check.
    +        else:
    +            # The format string uses only unnamed format specifiers.
    +            # Check that the number of arguments passed to the RHS of
    +            # the % operator matches the number required by the format
    +            # string.
    +            if isinstance(args, astroid.Tuple):
    +                num_args = len(args.elts)
    +            elif isinstance(args, OTHER_NODES + (astroid.Dict, astroid.DictComp)):
    +                num_args = 1
    +            else:
    +                # The RHS of the format specifier is a name or
    +                # expression.  It could be a tuple of unknown size, so
    +                # there's nothing we can check.
    +                num_args = None
    +            if num_args is not None:
    +                if num_args > required_num_args:
    +                    self.add_message('too-many-format-args', node=node)
    +                elif num_args < required_num_args:
    +                    self.add_message('too-few-format-args', node=node)
    +
    +
    +class StringMethodsChecker(BaseChecker):
    +    __implements__ = (IAstroidChecker,)
    +    name = 'string'
    +    msgs = {
    +        'E1310': ("Suspicious argument in %s.%s call",
    +                  "bad-str-strip-call",
    +                  "The argument to a str.{l,r,}strip call contains a"
    +                  " duplicate character, "),
    +        }
    +
    +    @check_messages(*(MSGS.keys()))
    +    def visit_callfunc(self, node):
    +        func = utils.safe_infer(node.func)
    +        if (isinstance(func, astroid.BoundMethod)
    +                and isinstance(func.bound, astroid.Instance)
    +                and func.bound.name in ('str', 'unicode', 'bytes')):
    +            if func.name in ('strip', 'lstrip', 'rstrip') and node.args:
    +                arg = utils.safe_infer(node.args[0])
    +                if not isinstance(arg, astroid.Const):
    +                    return
    +                if len(arg.value) != len(set(arg.value)):
    +                    self.add_message('bad-str-strip-call', node=node,
    +                                     args=(func.bound.name, func.name))
    +            elif func.name == 'format':
    +                if _PY27 or _PY3K:
    +                    self._check_new_format(node, func)
    +
    +    def _check_new_format(self, node, func):
    +        """ Check the new string formatting. """
    +        # TODO: skip (for now) format nodes which don't have
    +        #       an explicit string on the left side of the format operation.
    +        #       We do this because our inference engine can't properly handle
    +        #       redefinitions of the original string.
    +        #       For more details, see issue 287.
    +        #
    +        # Note that there may not be any left side at all, if the format method
    +        # has been assigned to another variable. See issue 351. For example:
    +        #
    +        #    fmt = 'some string {}'.format
    +        #    fmt('arg')
    +        if (isinstance(node.func, astroid.Getattr)
    +                and not isinstance(node.func.expr, astroid.Const)):
    +            return
    +        try:
    +            strnode = next(func.bound.infer())
    +        except astroid.InferenceError:
    +            return
    +        if not isinstance(strnode, astroid.Const):
    +            return
    +        if node.starargs or node.kwargs:
    +            # TODO: Don't complicate the logic, skip these for now.
    +            return
    +        try:
    +            positional, named = get_args(node)
    +        except astroid.InferenceError:
    +            return
    +        try:
    +            fields, num_args, manual_pos = parse_format_method_string(strnode.value)
    +        except utils.IncompleteFormatString:
    +            self.add_message('bad-format-string', node=node)
    +            return
    +
    +        named_fields = set(field[0] for field in fields
    +                           if isinstance(field[0], six.string_types))
    +        if num_args and manual_pos:
    +            self.add_message('format-combined-specification',
    +                             node=node)
    +            return
    +
    +        check_args = False
    +        # Consider "{[0]} {[1]}" as num_args.
    +        num_args += sum(1 for field in named_fields
    +                        if field == '')
    +        if named_fields:
    +            for field in named_fields:
    +                if field not in named and field:
    +                    self.add_message('missing-format-argument-key',
    +                                     node=node,
    +                                     args=(field, ))
    +            for field in named:
    +                if field not in named_fields:
    +                    self.add_message('unused-format-string-argument',
    +                                     node=node,
    +                                     args=(field, ))
    +            # num_args can be 0 if manual_pos is not.
    +            num_args = num_args or manual_pos
    +            if positional or num_args:
    +                empty = any(True for field in named_fields
    +                            if field == '')
    +                if named or empty:
    +                    # Verify the required number of positional arguments
    +                    # only if the .format got at least one keyword argument.
    +                    # This means that the format strings accepts both
    +                    # positional and named fields and we should warn
    +                    # when one of the them is missing or is extra.
    +                    check_args = True
    +        else:
    +            check_args = True
    +        if check_args:
    +            # num_args can be 0 if manual_pos is not.
    +            num_args = num_args or manual_pos
    +            if positional > num_args:
    +                self.add_message('too-many-format-args', node=node)
    +            elif positional < num_args:
    +                self.add_message('too-few-format-args', node=node)
    +
    +        self._check_new_format_specifiers(node, fields, named)
    +
    +    def _check_new_format_specifiers(self, node, fields, named):
    +        """
    +        Check attribute and index access in the format
    +        string ("{0.a}" and "{0[a]}").
    +        """
    +        for key, specifiers in fields:
    +            # Obtain the argument. If it can't be obtained
    +            # or infered, skip this check.
    +            if key == '':
    +                # {[0]} will have an unnamed argument, defaulting
    +                # to 0. It will not be present in `named`, so use the value
    +                # 0 for it.
    +                key = 0
    +            if isinstance(key, numbers.Number):
    +                try:
    +                    argname = utils.get_argument_from_call(node, key)
    +                except utils.NoSuchArgumentError:
    +                    continue
    +            else:
    +                if key not in named:
    +                    continue
    +                argname = named[key]
    +            if argname in (astroid.YES, None):
    +                continue
    +            try:
    +                argument = next(argname.infer())
    +            except astroid.InferenceError:
    +                continue
    +            if not specifiers or argument is astroid.YES:
    +                # No need to check this key if it doesn't
    +                # use attribute / item access
    +                continue
    +            if argument.parent and isinstance(argument.parent, astroid.Arguments):
    +                # Ignore any object coming from an argument,
    +                # because we can't infer its value properly.
    +                continue
    +            previous = argument
    +            parsed = []
    +            for is_attribute, specifier in specifiers:
    +                if previous is astroid.YES:
    +                    break
    +                parsed.append((is_attribute, specifier))
    +                if is_attribute:
    +                    try:
    +                        previous = previous.getattr(specifier)[0]
    +                    except astroid.NotFoundError:
    +                        if (hasattr(previous, 'has_dynamic_getattr') and
    +                                previous.has_dynamic_getattr()):
    +                            # Don't warn if the object has a custom __getattr__
    +                            break
    +                        path = get_access_path(key, parsed)
    +                        self.add_message('missing-format-attribute',
    +                                         args=(specifier, path),
    +                                         node=node)
    +                        break
    +                else:
    +                    warn_error = False
    +                    if hasattr(previous, 'getitem'):
    +                        try:
    +                            previous = previous.getitem(specifier)
    +                        except (IndexError, TypeError):
    +                            warn_error = True
    +                    else:
    +                        try:
    +                            # Lookup __getitem__ in the current node,
    +                            # but skip further checks, because we can't
    +                            # retrieve the looked object
    +                            previous.getattr('__getitem__')
    +                            break
    +                        except astroid.NotFoundError:
    +                            warn_error = True
    +                    if warn_error:
    +                        path = get_access_path(key, parsed)
    +                        self.add_message('invalid-format-index',
    +                                         args=(specifier, path),
    +                                         node=node)
    +                        break
    +
    +                try:
    +                    previous = next(previous.infer())
    +                except astroid.InferenceError:
    +                    # can't check further if we can't infer it
    +                    break
    +
    +
    +
    +class StringConstantChecker(BaseTokenChecker):
    +    """Check string literals"""
    +    __implements__ = (ITokenChecker, IRawChecker)
    +    name = 'string_constant'
    +    msgs = {
    +        'W1401': ('Anomalous backslash in string: \'%s\'. '
    +                  'String constant might be missing an r prefix.',
    +                  'anomalous-backslash-in-string',
    +                  'Used when a backslash is in a literal string but not as an '
    +                  'escape.'),
    +        'W1402': ('Anomalous Unicode escape in byte string: \'%s\'. '
    +                  'String constant might be missing an r or u prefix.',
    +                  'anomalous-unicode-escape-in-string',
    +                  'Used when an escape like \\u is encountered in a byte '
    +                  'string where it has no effect.'),
    +        }
    +
    +    # Characters that have a special meaning after a backslash in either
    +    # Unicode or byte strings.
    +    ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'\"01234567'
    +
    +    # TODO(mbp): Octal characters are quite an edge case today; people may
    +    # prefer a separate warning where they occur.  \0 should be allowed.
    +
    +    # Characters that have a special meaning after a backslash but only in
    +    # Unicode strings.
    +    UNICODE_ESCAPE_CHARACTERS = 'uUN'
    +
    +    def process_module(self, module):
    +        self._unicode_literals = 'unicode_literals' in module.future_imports
    +
    +    def process_tokens(self, tokens):
    +        for (tok_type, token, (start_row, _), _, _) in tokens:
    +            if tok_type == tokenize.STRING:
    +                # 'token' is the whole un-parsed token; we can look at the start
    +                # of it to see whether it's a raw or unicode string etc.
    +                self.process_string_token(token, start_row)
    +
    +    def process_string_token(self, token, start_row):
    +        for i, c in enumerate(token):
    +            if c in '\'\"':
    +                quote_char = c
    +                break
    +        # pylint: disable=undefined-loop-variable
    +        prefix = token[:i].lower() #  markers like u, b, r.
    +        after_prefix = token[i:]
    +        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
    +            string_body = after_prefix[3:-3]
    +        else:
    +            string_body = after_prefix[1:-1]  # Chop off quotes
    +        # No special checks on raw strings at the moment.
    +        if 'r' not in prefix:
    +            self.process_non_raw_string_token(prefix, string_body, start_row)
    +
    +    def process_non_raw_string_token(self, prefix, string_body, start_row):
    +        """check for bad escapes in a non-raw string.
    +
    +        prefix: lowercase string of eg 'ur' string prefix markers.
    +        string_body: the un-parsed body of the string, not including the quote
    +        marks.
    +        start_row: integer line number in the source.
    +        """
    +        # Walk through the string; if we see a backslash then escape the next
    +        # character, and skip over it.  If we see a non-escaped character,
    +        # alert, and continue.
    +        #
    +        # Accept a backslash when it escapes a backslash, or a quote, or
    +        # end-of-line, or one of the letters that introduce a special escape
    +        # sequence 
    +        #
    +        # TODO(mbp): Maybe give a separate warning about the rarely-used
    +        # \a \b \v \f?
    +        #
    +        # TODO(mbp): We could give the column of the problem character, but
    +        # add_message doesn't seem to have a way to pass it through at present.
    +        i = 0
    +        while True:
    +            i = string_body.find('\\', i)
    +            if i == -1:
    +                break
    +            # There must be a next character; having a backslash at the end
    +            # of the string would be a SyntaxError.
    +            next_char = string_body[i+1]
    +            match = string_body[i:i+2]
    +            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
    +                if 'u' in prefix:
    +                    pass
    +                elif (_PY3K or self._unicode_literals) and 'b' not in prefix:
    +                    pass  # unicode by default
    +                else:
    +                    self.add_message('anomalous-unicode-escape-in-string',
    +                                     line=start_row, args=(match, ))
    +            elif next_char not in self.ESCAPE_CHARACTERS:
    +                self.add_message('anomalous-backslash-in-string',
    +                                 line=start_row, args=(match, ))
    +            # Whether it was a valid escape or not, backslash followed by
    +            # another character can always be consumed whole: the second
    +            # character can never be the start of a new backslash escape.
    +            i += 2
    +
    +
    +
    +def register(linter):
    +    """required method to auto register this checker """
    +    linter.register_checker(StringFormatChecker(linter))
    +    linter.register_checker(StringMethodsChecker(linter))
    +    linter.register_checker(StringConstantChecker(linter))
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py b/pymode/libs/pylint/checkers/typecheck.py
    similarity index 65%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py
    rename to pymode/libs/pylint/checkers/typecheck.py
    index 25f7612e..9f074ae0 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/typecheck.py
    +++ b/pymode/libs/pylint/checkers/typecheck.py
    @@ -21,24 +21,23 @@
     
     import astroid
     from astroid import InferenceError, NotFoundError, YES, Instance
    +from astroid.bases import BUILTINS
     
    -from pylint.interfaces import IAstroidChecker
    +from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE
     from pylint.checkers import BaseChecker
    -from pylint.checkers.utils import safe_infer, is_super, check_messages
    +from pylint.checkers.utils import (
    +    safe_infer, is_super,
    +    check_messages, decorated_with_property)
     
     MSGS = {
         'E1101': ('%s %r has no %r member',
                   'no-member',
    -              'Used when a variable is accessed for an unexistent member.'),
    +              'Used when a variable is accessed for an unexistent member.',
    +              {'old_names': [('E1103', 'maybe-no-member')]}),
         'E1102': ('%s is not callable',
                   'not-callable',
                   'Used when an object being called has been inferred to a non \
                   callable object'),
    -    'E1103': ('%s %r has no %r member (but some types could not be inferred)',
    -              'maybe-no-member',
    -              'Used when a variable is accessed for an unexistent member, but \
    -              astroid was not able to interpret all possible types of this \
    -              variable.'),
         'E1111': ('Assigning to function call which doesn\'t return',
                   'assignment-from-no-return',
                   'Used when an assignment is done on a function call but the \
    @@ -55,11 +54,6 @@
                   'too-many-function-args',
                   'Used when a function call passes too many positional \
                   arguments.'),
    -    'E1122': ('Duplicate keyword argument %r in %s call',
    -              'duplicate-keyword-arg',
    -              'Used when a function call passes the same keyword argument \
    -              multiple times.',
    -              {'maxversion': (2, 6)}),
         'E1123': ('Unexpected keyword argument %r in %s call',
                   'unexpected-keyword-arg',
                   'Used when a function call passes a keyword argument that \
    @@ -72,10 +66,23 @@
         'E1125': ('Missing mandatory keyword argument %r in %s call',
                   'missing-kwoa',
                   ('Used when a function call does not pass a mandatory'
    -              ' keyword-only argument.'),
    +               ' keyword-only argument.'),
                   {'minversion': (3, 0)}),
    +    'E1126': ('Sequence index is not an int, slice, or instance with __index__',
    +              'invalid-sequence-index',
    +              'Used when a sequence type is indexed with an invalid type. '
    +              'Valid types are ints, slices, and objects with an __index__ '
    +              'method.'),
    +    'E1127': ('Slice index is not an int, None, or instance with __index__',
    +              'invalid-slice-index',
    +              'Used when a slice index is not an integer, None, or an object \
    +               with an __index__ method.'),
         }
     
    +# builtin sequence types in Python 2 and 3.
    +SEQUENCE_TYPES = set(['str', 'unicode', 'list', 'tuple', 'bytearray',
    +                      'xrange', 'range', 'bytes', 'memoryview'])
    +
     def _determine_callable(callable_obj):
         # Ordering is important, since BoundMethod is a subclass of UnboundMethod,
         # and Function inherits Lambda.
    @@ -132,39 +139,38 @@ class TypeChecker(BaseChecker):
                      'help' : 'Tells whether missing members accessed in mixin \
     class should be ignored. A mixin class is detected if its name ends with \
     "mixin" (case insensitive).'}
    -                ),
    -                ('ignored-modules',
    -                 {'default': (),
    -                  'type': 'csv',
    -                  'metavar': '',
    -                  'help': 'List of module names for which member attributes \
    +               ),
    +               ('ignored-modules',
    +                {'default': (),
    +                 'type': 'csv',
    +                 'metavar': '',
    +                 'help': 'List of module names for which member attributes \
     should not be checked (useful for modules/projects where namespaces are \
     manipulated during runtime and thus existing member attributes cannot be \
     deduced by static analysis'},
    -                 ),
    +               ),
                    ('ignored-classes',
                     {'default' : ('SQLObject',),
                      'type' : 'csv',
                      'metavar' : '',
                      'help' : 'List of classes names for which member attributes \
     should not be checked (useful for classes with attributes dynamically set).'}
    -                 ),
    +               ),
     
                    ('zope',
                     {'default' : False, 'type' : 'yn', 'metavar': '',
                      'help' : 'When zope mode is activated, add a predefined set \
     of Zope acquired attributes to generated-members.'}
    -                ),
    +               ),
                    ('generated-members',
    -                {'default' : (
    -        'REQUEST', 'acl_users', 'aq_parent'),
    +                {'default' : ('REQUEST', 'acl_users', 'aq_parent'),
                      'type' : 'string',
                      'metavar' : '',
                      'help' : 'List of members which are set dynamically and \
     missed by pylint inference system, and so shouldn\'t trigger E0201 when \
     accessed. Python regular expressions are accepted.'}
    -                ),
    -        )
    +               ),
    +              )
     
         def open(self):
             # do this in open since config not fully initialized in __init__
    @@ -179,7 +185,7 @@ def visit_assattr(self, node):
         def visit_delattr(self, node):
             self.visit_getattr(node)
     
    -    @check_messages('no-member', 'maybe-no-member')
    +    @check_messages('no-member')
         def visit_getattr(self, node):
             """check that the accessed attribute exists
     
    @@ -241,6 +247,20 @@ def visit_getattr(self, node):
                     # explicit skipping of module member access
                     if owner.root().name in self.config.ignored_modules:
                         continue
    +                if isinstance(owner, astroid.Class):
    +                    # Look up in the metaclass only if the owner is itself
    +                    # a class.
    +                    # TODO: getattr doesn't return by default members
    +                    # from the metaclass, because handling various cases
    +                    # of methods accessible from the metaclass itself
    +                    # and/or subclasses only is too complicated for little to
    +                    # no benefit.
    +                    metaclass = owner.metaclass()
    +                    try:
    +                        if metaclass and metaclass.getattr(node.attrname):
    +                            continue
    +                    except NotFoundError:
    +                        pass
                     missingattr.add((owner, name))
                     continue
                 # stop on the first found
    @@ -257,13 +277,11 @@ def visit_getattr(self, node):
                     if actual in done:
                         continue
                     done.add(actual)
    -                if inference_failure:
    -                    msgid = 'maybe-no-member'
    -                else:
    -                    msgid = 'no-member'
    -                self.add_message(msgid, node=node,
    +                confidence = INFERENCE if not inference_failure else INFERENCE_FAILURE
    +                self.add_message('no-member', node=node,
                                      args=(owner.display_type(), name,
    -                                       node.attrname))
    +                                       node.attrname),
    +                                 confidence=confidence)
     
         @check_messages('assignment-from-no-return', 'assignment-from-none')
         def visit_assign(self, node):
    @@ -293,7 +311,51 @@ def visit_assign(self, node):
                 else:
                     self.add_message('assignment-from-none', node=node)
     
    -    @check_messages(*(MSGS.keys()))
    +    def _check_uninferable_callfunc(self, node):
    +        """
    +        Check that the given uninferable CallFunc node does not
    +        call an actual function.
    +        """
    +        if not isinstance(node.func, astroid.Getattr):
    +            return
    +
    +        # Look for properties. First, obtain
    +        # the lhs of the Getattr node and search the attribute
    +        # there. If that attribute is a property or a subclass of properties,
    +        # then most likely it's not callable.
    +
    +        # TODO: since astroid doesn't understand descriptors very well
    +        # we will not handle them here, right now.
    +
    +        expr = node.func.expr
    +        klass = safe_infer(expr)
    +        if (klass is None or klass is astroid.YES or
    +                not isinstance(klass, astroid.Instance)):
    +            return
    +
    +        try:
    +            attrs = klass._proxied.getattr(node.func.attrname)
    +        except astroid.NotFoundError:
    +            return
    +
    +        for attr in attrs:
    +            if attr is astroid.YES:
    +                continue
    +            if not isinstance(attr, astroid.Function):
    +                continue
    +
    +            # Decorated, see if it is decorated with a property.
    +            # Also, check the returns and see if they are callable.
    +            if decorated_with_property(attr):
    +                if all(return_node.callable()
    +                       for return_node in attr.infer_call_result(node)):
    +                    continue
    +                else:
    +                    self.add_message('not-callable', node=node,
    +                                     args=node.func.as_string())
    +                    break
    +
    +    @check_messages(*(list(MSGS.keys())))
         def visit_callfunc(self, node):
             """check that called functions/methods are inferred to callable objects,
             and that the arguments passed to the function match the parameters in
    @@ -305,22 +367,22 @@ def visit_callfunc(self, node):
             num_positional_args = 0
             for arg in node.args:
                 if isinstance(arg, astroid.Keyword):
    -                keyword = arg.arg
    -                if keyword in keyword_args:
    -                    self.add_message('duplicate-keyword-arg', node=node, args=keyword)
    -                keyword_args.add(keyword)
    +                keyword_args.add(arg.arg)
                 else:
                     num_positional_args += 1
     
             called = safe_infer(node.func)
             # only function, generator and object defining __call__ are allowed
             if called is not None and not called.callable():
    -            self.add_message('not-callable', node=node, args=node.func.as_string())
    +            self.add_message('not-callable', node=node,
    +                             args=node.func.as_string())
    +
    +        self._check_uninferable_callfunc(node)
     
             try:
                 called, implicit_args, callable_name = _determine_callable(called)
             except ValueError:
    -            # Any error occurred during determining the function type, most of 
    +            # Any error occurred during determining the function type, most of
                 # those errors are handled by different warnings.
                 return
             num_positional_args += implicit_args
    @@ -378,7 +440,8 @@ def visit_callfunc(self, node):
                     break
                 else:
                     # Too many positional arguments.
    -                self.add_message('too-many-function-args', node=node, args=(callable_name,))
    +                self.add_message('too-many-function-args',
    +                                 node=node, args=(callable_name,))
                     break
     
             # 2. Match the keyword arguments.
    @@ -387,13 +450,15 @@ def visit_callfunc(self, node):
                     i = parameter_name_to_index[keyword]
                     if parameters[i][1]:
                         # Duplicate definition of function parameter.
    -                    self.add_message('redundant-keyword-arg', node=node, args=(keyword, callable_name))
    +                    self.add_message('redundant-keyword-arg',
    +                                     node=node, args=(keyword, callable_name))
                     else:
                         parameters[i][1] = True
                 elif keyword in kwparams:
                     if kwparams[keyword][1]:  # XXX is that even possible?
                         # Duplicate definition of function parameter.
    -                    self.add_message('redundant-keyword-arg', node=node, args=(keyword, callable_name))
    +                    self.add_message('redundant-keyword-arg', node=node,
    +                                     args=(keyword, callable_name))
                     else:
                         kwparams[keyword][1] = True
                 elif called.args.kwarg is not None:
    @@ -401,7 +466,8 @@ def visit_callfunc(self, node):
                     pass
                 else:
                     # Unexpected keyword argument.
    -                self.add_message('unexpected-keyword-arg', node=node, args=(keyword, callable_name))
    +                self.add_message('unexpected-keyword-arg', node=node,
    +                                 args=(keyword, callable_name))
     
             # 3. Match the *args, if any.  Note that Python actually processes
             #    *args _before_ any keyword arguments, but we wait until after
    @@ -438,13 +504,123 @@ def visit_callfunc(self, node):
                         display_name = ''
                     else:
                         display_name = repr(name)
    -                self.add_message('no-value-for-parameter', node=node, args=(display_name, callable_name))
    +                self.add_message('no-value-for-parameter', node=node,
    +                                 args=(display_name, callable_name))
     
             for name in kwparams:
                 defval, assigned = kwparams[name]
                 if defval is None and not assigned:
    -                self.add_message('missing-kwoa', node=node, args=(name, callable_name))
    +                self.add_message('missing-kwoa', node=node,
    +                                 args=(name, callable_name))
    +
    +    @check_messages('invalid-sequence-index')
    +    def visit_extslice(self, node):
    +        # Check extended slice objects as if they were used as a sequence
    +        # index to check if the object being sliced can support them
    +        return self.visit_index(node)
    +
    +    @check_messages('invalid-sequence-index')
    +    def visit_index(self, node):
    +        if not node.parent or not hasattr(node.parent, "value"):
    +            return
    +
    +        # Look for index operations where the parent is a sequence type.
    +        # If the types can be determined, only allow indices to be int,
    +        # slice or instances with __index__.
    +
    +        parent_type = safe_infer(node.parent.value)
    +        if not isinstance(parent_type, (astroid.Class, astroid.Instance)):
    +            return
    +
    +        # Determine what method on the parent this index will use
    +        # The parent of this node will be a Subscript, and the parent of that
    +        # node determines if the Subscript is a get, set, or delete operation.
    +        operation = node.parent.parent
    +        if isinstance(operation, astroid.Assign):
    +            methodname = '__setitem__'
    +        elif isinstance(operation, astroid.Delete):
    +            methodname = '__delitem__'
    +        else:
    +            methodname = '__getitem__'
    +
    +        # Check if this instance's __getitem__, __setitem__, or __delitem__, as
    +        # appropriate to the statement, is implemented in a builtin sequence
    +        # type. This way we catch subclasses of sequence types but skip classes
    +        # that override __getitem__ and which may allow non-integer indices.
    +        try:
    +            methods = parent_type.getattr(methodname)
    +            if methods is astroid.YES:
    +                return
    +            itemmethod = methods[0]
    +        except (astroid.NotFoundError, IndexError):
    +            return
    +
    +        if not isinstance(itemmethod, astroid.Function):
    +            return
    +        if itemmethod.root().name != BUILTINS:
    +            return
    +        if not itemmethod.parent:
    +            return
    +        if itemmethod.parent.name not in SEQUENCE_TYPES:
    +            return
    +
    +        # For ExtSlice objects coming from visit_extslice, no further
    +        # inference is necessary, since if we got this far the ExtSlice
    +        # is an error.
    +        if isinstance(node, astroid.ExtSlice):
    +            index_type = node
    +        else:
    +            index_type = safe_infer(node)
    +        if index_type is None or index_type is astroid.YES:
    +            return
    +
    +        # Constants must be of type int
    +        if isinstance(index_type, astroid.Const):
    +            if isinstance(index_type.value, int):
    +                return
    +        # Instance values must be int, slice, or have an __index__ method
    +        elif isinstance(index_type, astroid.Instance):
    +            if index_type.pytype() in (BUILTINS + '.int', BUILTINS + '.slice'):
    +                return
    +            try:
    +                index_type.getattr('__index__')
    +                return
    +            except astroid.NotFoundError:
    +                pass
    +
    +        # Anything else is an error
    +        self.add_message('invalid-sequence-index', node=node)
    +
    +    @check_messages('invalid-slice-index')
    +    def visit_slice(self, node):
    +        # Check the type of each part of the slice
    +        for index in (node.lower, node.upper, node.step):
    +            if index is None:
    +                continue
    +
    +            index_type = safe_infer(index)
    +            if index_type is None or index_type is astroid.YES:
    +                continue
    +
    +            # Constants must of type int or None
    +            if isinstance(index_type, astroid.Const):
    +                if isinstance(index_type.value, (int, type(None))):
    +                    continue
    +            # Instance values must be of type int, None or an object
    +            # with __index__
    +            elif isinstance(index_type, astroid.Instance):
    +                if index_type.pytype() in (BUILTINS + '.int',
    +                                           BUILTINS + '.NoneType'):
    +                    continue
    +
    +                try:
    +                    index_type.getattr('__index__')
    +                    return
    +                except astroid.NotFoundError:
    +                    pass
     
    +            # Anything else is an error
    +            self.add_message('invalid-slice-index', node=node)
     
     def register(linter):
         """required method to auto register this checker """
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py b/pymode/libs/pylint/checkers/utils.py
    similarity index 69%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py
    rename to pymode/libs/pylint/checkers/utils.py
    index e7d85d41..2cb01d55 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/utils.py
    +++ b/pymode/libs/pylint/checkers/utils.py
    @@ -19,6 +19,7 @@
     """
     
     import re
    +import sys
     import string
     
     import astroid
    @@ -26,8 +27,15 @@
     from logilab.common.compat import builtins
     
     BUILTINS_NAME = builtins.__name__
    -
     COMP_NODE_TYPES = astroid.ListComp, astroid.SetComp, astroid.DictComp, astroid.GenExpr
    +PY3K = sys.version_info[0] == 3
    +
    +if not PY3K:
    +    EXCEPTIONS_MODULE = "exceptions"
    +else:
    +    EXCEPTIONS_MODULE = "builtins"
    +ABC_METHODS = set(('abc.abstractproperty', 'abc.abstractmethod',
    +                   'abc.abstractclassmethod', 'abc.abstractstaticmethod'))
     
     
     class NoSuchArgumentError(Exception):
    @@ -66,11 +74,10 @@ def clobber_in_except(node):
             if is_builtin(name):
                 return (True, (name, 'builtins'))
             else:
    -            scope, stmts = node.lookup(name)
    -            if (stmts and
    -                not isinstance(stmts[0].ass_type(),
    -                               (astroid.Assign, astroid.AugAssign,
    -                                astroid.ExceptHandler))):
    +            stmts = node.lookup(name)[1]
    +            if (stmts and not isinstance(stmts[0].ass_type(),
    +                                         (astroid.Assign, astroid.AugAssign,
    +                                          astroid.ExceptHandler))):
                     return (True, (name, 'outer scope (line %s)' % stmts[0].fromlineno))
         return (False, None)
     
    @@ -82,11 +89,11 @@ def safe_infer(node):
         """
         try:
             inferit = node.infer()
    -        value = inferit.next()
    +        value = next(inferit)
         except astroid.InferenceError:
             return
         try:
    -        inferit.next()
    +        next(inferit)
             return # None if there is ambiguity on the inferred node
         except astroid.InferenceError:
             return # there is some kind of ambiguity
    @@ -152,12 +159,12 @@ def is_defined_before(var_node):
                     if ass_node.name == varname:
                         return True
             elif isinstance(_node, astroid.With):
    -            for expr, vars in _node.items:
    +            for expr, ids in _node.items:
                     if expr.parent_of(var_node):
    -                    break                
    -                if (vars and
    -                    isinstance(vars, astroid.AssName) and
    -                    vars.name == varname):
    +                    break
    +                if (ids and
    +                        isinstance(ids, astroid.AssName) and
    +                        ids.name == varname):
                         return True
             elif isinstance(_node, (astroid.Lambda, astroid.Function)):
                 if _node.args.is_argument(varname):
    @@ -204,9 +211,9 @@ def is_func_decorator(node):
             if isinstance(parent, astroid.Decorators):
                 return True
             if (parent.is_statement or
    -            isinstance(parent, astroid.Lambda) or
    -            isinstance(parent, (scoped_nodes.ComprehensionScope,
    -                                scoped_nodes.ListComp))):
    +                isinstance(parent, astroid.Lambda) or
    +                isinstance(parent, (scoped_nodes.ComprehensionScope,
    +                                    scoped_nodes.ListComp))):
                 break
             parent = parent.parent
         return False
    @@ -268,7 +275,7 @@ def overrides_a_method(class_node, name):
                      '__or__', '__ior__', '__ror__',
                      '__xor__', '__ixor__', '__rxor__',
                      # XXX To be continued
    -                 ))
    +                ))
     
     def check_messages(*messages):
         """decorator to store messages that are handled by a checker method"""
    @@ -345,7 +352,11 @@ def next_char(i):
                 if char in 'hlL':
                     i, char = next_char(i)
                 # Parse the conversion type (mandatory).
    -            if char not in 'diouxXeEfFgGcrs%':
    +            if PY3K:
    +                flags = 'diouxXeEfFgGcrs%a'
    +            else:
    +                flags = 'diouxXeEfFgGcrs%'
    +            if char not in flags:
                     raise UnsupportedFormatCharacter(i)
                 if key:
                     keys.add(key)
    @@ -354,12 +365,13 @@ def next_char(i):
             i += 1
         return keys, num_args
     
    +
     def is_attr_protected(attrname):
         """return True if attribute name is protected (start with _ and some other
         details), False otherwise.
         """
         return attrname[0] == '_' and not attrname == '_' and not (
    -             attrname.startswith('__') and attrname.endswith('__'))
    +        attrname.startswith('__') and attrname.endswith('__'))
     
     def node_frame_class(node):
         """return klass node for a method node (or a staticmethod or a
    @@ -380,8 +392,8 @@ def is_super_call(expr):
         is super. Check before that you're in a method.
         """
         return (isinstance(expr, astroid.CallFunc) and
    -        isinstance(expr.func, astroid.Name) and
    -        expr.func.name == 'super')
    +            isinstance(expr.func, astroid.Name) and
    +            expr.func.name == 'super')
     
     def is_attr_private(attrname):
         """Check that attribute name is private (at least two leading underscores,
    @@ -407,10 +419,146 @@ def get_argument_from_call(callfunc_node, position=None, keyword=None):
         try:
             if position is not None and not isinstance(callfunc_node.args[position], astroid.Keyword):
                 return callfunc_node.args[position]
    -    except IndexError, error:
    +    except IndexError as error:
             raise NoSuchArgumentError(error)
         if keyword:
             for arg in callfunc_node.args:
                 if isinstance(arg, astroid.Keyword) and arg.arg == keyword:
                     return arg.value
         raise NoSuchArgumentError
    +
    +def inherit_from_std_ex(node):
    +    """
    +    Return true if the given class node is subclass of
    +    exceptions.Exception.
    +    """
    +    if node.name in ('Exception', 'BaseException') \
    +            and node.root().name == EXCEPTIONS_MODULE:
    +        return True
    +    return any(inherit_from_std_ex(parent)
    +               for parent in node.ancestors(recurs=False))
    +
    +def is_import_error(handler):
    +    """
    +    Check if the given exception handler catches
    +    ImportError.
    +
    +    :param handler: A node, representing an ExceptHandler node.
    +    :returns: True if the handler catches ImportError, False otherwise.
    +    """
    +    names = None
    +    if isinstance(handler.type, astroid.Tuple):
    +        names = [name for name in handler.type.elts
    +                 if isinstance(name, astroid.Name)]
    +    elif isinstance(handler.type, astroid.Name):
    +        names = [handler.type]
    +    else:
    +        # Don't try to infer that.
    +        return
    +    for name in names:
    +        try:
    +            for infered in name.infer():
    +                if (isinstance(infered, astroid.Class) and
    +                        inherit_from_std_ex(infered) and
    +                        infered.name == 'ImportError'):
    +                    return True
    +        except astroid.InferenceError:
    +            continue
    +
    +def has_known_bases(klass):
    +    """Returns true if all base classes of a class could be inferred."""
    +    try:
    +        return klass._all_bases_known
    +    except AttributeError:
    +        pass
    +    for base in klass.bases:
    +        result = safe_infer(base)
    +        # TODO: check for A->B->A->B pattern in class structure too?
    +        if (not isinstance(result, astroid.Class) or
    +                result is klass or
    +                not has_known_bases(result)):
    +            klass._all_bases_known = False
    +            return False
    +    klass._all_bases_known = True
    +    return True
    +
    +def decorated_with_property(node):
    +    """ Detect if the given function node is decorated with a property. """
    +    if not node.decorators:
    +        return False
    +    for decorator in node.decorators.nodes:
    +        if not isinstance(decorator, astroid.Name):
    +            continue
    +        try:
    +            for infered in decorator.infer():
    +                if isinstance(infered, astroid.Class):
    +                    if (infered.root().name == BUILTINS_NAME and
    +                            infered.name == 'property'):
    +                        return True
    +                    for ancestor in infered.ancestors():
    +                        if (ancestor.name == 'property' and
    +                                ancestor.root().name == BUILTINS_NAME):
    +                            return True
    +        except astroid.InferenceError:
    +            pass
    +
    +
    +def decorated_with_abc(func):
    +    """Determine if the `func` node is decorated with `abc` decorators."""
    +    if func.decorators:
    +        for node in func.decorators.nodes:
    +            try:
    +                infered = next(node.infer())
    +            except astroid.InferenceError:
    +                continue
    +            if infered and infered.qname() in ABC_METHODS:
    +                return True
    +
    +
    +def unimplemented_abstract_methods(node, is_abstract_cb=decorated_with_abc):
    +    """
    +    Get the unimplemented abstract methods for the given *node*.
    +
    +    A method can be considered abstract if the callback *is_abstract_cb*
    +    returns a ``True`` value. The check defaults to verifying that
    +    a method is decorated with abstract methods.
    +    The function will work only for new-style classes. For old-style
    +    classes, it will simply return an empty dictionary.
    +    For the rest of them, it will return a dictionary of abstract method
    +    names and their inferred objects.
    +    """
    +    visited = {}
    +    try:
    +        mro = reversed(node.mro())
    +    except NotImplementedError:
    +        # Old style class, it will not have a mro.
    +        return {}
    +    except astroid.ResolveError:
    +        # Probably inconsistent hierarchy, don'try
    +        # to figure this out here.
    +        return {}
    +    for ancestor in mro:
    +        for obj in ancestor.values():
    +            infered = obj
    +            if isinstance(obj, astroid.AssName):
    +                infered = safe_infer(obj)
    +                if not infered:
    +                    continue
    +                if not isinstance(infered, astroid.Function):
    +                    if obj.name in visited:
    +                        del visited[obj.name]
    +            if isinstance(infered, astroid.Function):
    +                # It's critical to use the original name,
    +                # since after inferring, an object can be something
    +                # else than expected, as in the case of the
    +                # following assignment.
    +                #
    +                # class A:
    +                #     def keys(self): pass
    +                #     __iter__ = keys
    +                abstract = is_abstract_cb(infered)
    +                if abstract:
    +                    visited[obj.name] = infered
    +                elif not abstract and obj.name in visited:
    +                    del visited[obj.name]
    +    return visited
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py b/pymode/libs/pylint/checkers/variables.py
    similarity index 60%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py
    rename to pymode/libs/pylint/checkers/variables.py
    index dc8d1115..8f6f9574 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py
    +++ b/pymode/libs/pylint/checkers/variables.py
    @@ -17,20 +17,26 @@
     """
     import os
     import sys
    +import re
     from copy import copy
     
     import astroid
    -from astroid import are_exclusive, builtin_lookup, AstroidBuildingException
    +from astroid import are_exclusive, builtin_lookup
    +from astroid import modutils
     
    -from logilab.common.modutils import file_from_modpath
    -
    -from pylint.interfaces import IAstroidChecker
    +from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
    +from pylint.utils import get_global_option
     from pylint.checkers import BaseChecker
    -from pylint.checkers.utils import (PYMETHODS, is_ancestor_name, is_builtin,
    -     is_defined_before, is_error, is_func_default, is_func_decorator,
    -     assign_parent, check_messages, is_inside_except, clobber_in_except,
    -     get_all_elements)
    +from pylint.checkers.utils import (
    +    PYMETHODS, is_ancestor_name, is_builtin,
    +    is_defined_before, is_error, is_func_default, is_func_decorator,
    +    assign_parent, check_messages, is_inside_except, clobber_in_except,
    +    get_all_elements, has_known_bases)
    +import six
    +
    +SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
     
    +PY3K = sys.version_info >= (3, 0)
     
     def in_for_else_branch(parent, stmt):
         """Returns True if stmt in inside the else branch for a parent For stmt."""
    @@ -40,7 +46,7 @@ def in_for_else_branch(parent, stmt):
     def overridden_method(klass, name):
         """get overridden method if any"""
         try:
    -        parent = klass.local_attr_ancestors(name).next()
    +        parent = next(klass.local_attr_ancestors(name))
         except (StopIteration, KeyError):
             return None
         try:
    @@ -68,6 +74,120 @@ def _get_unpacking_extra_info(node, infered):
             more = ' defined at line %s of %s' % (infered.lineno, infered_module)
         return more
     
    +def _detect_global_scope(node, frame, defframe):
    +    """ Detect that the given frames shares a global
    +    scope.
    +
    +    Two frames shares a global scope when neither
    +    of them are hidden under a function scope, as well
    +    as any of parent scope of them, until the root scope.
    +    In this case, depending from something defined later on
    +    will not work, because it is still undefined.
    +
    +    Example:
    +        class A:
    +            # B has the same global scope as `C`, leading to a NameError.
    +            class B(C): ...
    +        class C: ...
    +
    +    """
    +    def_scope = scope = None
    +    if frame and frame.parent:
    +        scope = frame.parent.scope()
    +    if defframe and defframe.parent:
    +        def_scope = defframe.parent.scope()
    +    if isinstance(frame, astroid.Function):
    +        # If the parent of the current node is a
    +        # function, then it can be under its scope
    +        # (defined in, which doesn't concern us) or
    +        # the `->` part of annotations. The same goes
    +        # for annotations of function arguments, they'll have
    +        # their parent the Arguments node.
    +        if not isinstance(node.parent,
    +                          (astroid.Function, astroid.Arguments)):
    +            return False
    +    elif any(not isinstance(f, (astroid.Class, astroid.Module))
    +             for f in (frame, defframe)):
    +        # Not interested in other frames, since they are already
    +        # not in a global scope.
    +        return False
    +
    +    break_scopes = []
    +    for s in (scope, def_scope):
    +        # Look for parent scopes. If there is anything different
    +        # than a module or a class scope, then they frames don't
    +        # share a global scope.
    +        parent_scope = s
    +        while parent_scope:
    +            if not isinstance(parent_scope, (astroid.Class, astroid.Module)):
    +                break_scopes.append(parent_scope)
    +                break
    +            if parent_scope.parent:
    +                parent_scope = parent_scope.parent.scope()
    +            else:
    +                break
    +    if break_scopes and len(set(break_scopes)) != 1:
    +        # Store different scopes than expected.
    +        # If the stored scopes are, in fact, the very same, then it means
    +        # that the two frames (frame and defframe) shares the same scope,
    +        # and we could apply our lineno analysis over them.
    +        # For instance, this works when they are inside a function, the node
    +        # that uses a definition and the definition itself.
    +        return False
    +    # At this point, we are certain that frame and defframe shares a scope
    +    # and the definition of the first depends on the second.
    +    return frame.lineno < defframe.lineno
    +
    +def _fix_dot_imports(not_consumed):
    +    """ Try to fix imports with multiple dots, by returning a dictionary
    +    with the import names expanded. The function unflattens root imports,
    +    like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree'
    +    and 'xml.sax' respectively.
    +    """
    +    # TODO: this should be improved in issue astroid #46
    +    names = {}
    +    for name, stmts in six.iteritems(not_consumed):
    +        if any(isinstance(stmt, astroid.AssName)
    +               and isinstance(stmt.ass_type(), astroid.AugAssign)
    +               for stmt in stmts):
    +            continue
    +        for stmt in stmts:
    +            if not isinstance(stmt, (astroid.From, astroid.Import)):
    +                continue
    +            for imports in stmt.names:
    +                second_name = None
    +                if imports[0] == "*":
    +                    # In case of wildcard imports,
    +                    # pick the name from inside the imported module.
    +                    second_name = name
    +                else:
    +                    if imports[0].find(".") > -1 or name in imports:
    +                        # Most likely something like 'xml.etree',
    +                        # which will appear in the .locals as 'xml'.
    +                        # Only pick the name if it wasn't consumed.
    +                        second_name = imports[0]
    +                if second_name and second_name not in names:
    +                    names[second_name] = stmt
    +    return sorted(names.items(), key=lambda a: a[1].fromlineno)
    +
    +def _find_frame_imports(name, frame):
    +    """
    +    Detect imports in the frame, with the required
    +    *name*. Such imports can be considered assignments.
    +    Returns True if an import for the given name was found.
    +    """
    +    imports = frame.nodes_of_class((astroid.Import, astroid.From))
    +    for import_node in imports:
    +        for import_name, import_alias in import_node.names:
    +            # If the import uses an alias, check only that.
    +            # Otherwise, check only the import name.
    +            if import_alias:
    +                if import_alias == name:
    +                    return True
    +            elif import_name and import_name == name:
    +                return True
    +
    +
     MSGS = {
         'E0601': ('Using variable %r before assignment',
                   'used-before-assignment',
    @@ -97,13 +217,13 @@ def _get_unpacking_extra_info(node, infered):
         'W0603': ('Using the global statement', # W0121
                   'global-statement',
                   'Used when you use the "global" statement to update a global \
    -              variable. PyLint just try to discourage this \
    +              variable. Pylint just try to discourage this \
                   usage. That doesn\'t mean you can not use it !'),
         'W0604': ('Using the global statement at the module level', # W0103
                   'global-at-module-level',
                   'Used when you use the "global" statement at the module level \
                   since it has no effect'),
    -    'W0611': ('Unused import %s',
    +    'W0611': ('Unused %s',
                   'unused-import',
                   'Used when an imported module or variable is not used.'),
         'W0612': ('Unused variable %r',
    @@ -147,7 +267,7 @@ def _get_unpacking_extra_info(node, infered):
                   'a sequence is used in an unpack assignment'),
     
         'W0640': ('Cell variable %s defined in loop',
    -              'cell-var-from-loop', 
    +              'cell-var-from-loop',
                   'A variable used in a closure is defined in a loop. '
                   'This will result in all closures using the same value for '
                   'the closed-over variable.'),
    @@ -168,8 +288,7 @@ class VariablesChecker(BaseChecker):
         name = 'variables'
         msgs = MSGS
         priority = -1
    -    options = (
    -               ("init-import",
    +    options = (("init-import",
                     {'default': 0, 'type' : 'yn', 'metavar' : '',
                      'help' : 'Tells whether we should check for unused import in \
     __init__ files.'}),
    @@ -183,8 +302,15 @@ class VariablesChecker(BaseChecker):
                      'metavar' : '',
                      'help' : 'List of additional names supposed to be defined in \
     builtins. Remember that you should avoid to define new builtins when possible.'
    -                 }),
    +                }),
    +               ("callbacks",
    +                {'default' : ('cb_', '_cb'), 'type' : 'csv',
    +                 'metavar' : '',
    +                 'help' : 'List of strings which can identify a callback '
    +                          'function by name. A callback name must start or '
    +                          'end with one of those strings.'}
                    )
    +              )
         def __init__(self, linter=None):
             BaseChecker.__init__(self, linter)
             self._to_consume = None
    @@ -195,12 +321,14 @@ def visit_module(self, node):
             checks globals doesn't overrides builtins
             """
             self._to_consume = [(copy(node.locals), {}, 'module')]
    -        for name, stmts in node.locals.iteritems():
    +        for name, stmts in six.iteritems(node.locals):
                 if is_builtin(name) and not is_inside_except(stmts[0]):
                     # do not print Redefining builtin for additional builtins
                     self.add_message('redefined-builtin', args=name, node=stmts[0])
     
    -    @check_messages('unused-import', 'unused-wildcard-import', 'redefined-builtin', 'undefined-all-variable', 'invalid-all-object')
    +    @check_messages('unused-import', 'unused-wildcard-import',
    +                    'redefined-builtin', 'undefined-all-variable',
    +                    'invalid-all-object')
         def leave_module(self, node):
             """leave module: check globals
             """
    @@ -208,17 +336,18 @@ def leave_module(self, node):
             not_consumed = self._to_consume.pop()[0]
             # attempt to check for __all__ if defined
             if '__all__' in node.locals:
    -            assigned = node.igetattr('__all__').next()
    +            assigned = next(node.igetattr('__all__'))
                 if assigned is not astroid.YES:
                     for elt in getattr(assigned, 'elts', ()):
                         try:
    -                        elt_name = elt.infer().next()
    +                        elt_name = next(elt.infer())
                         except astroid.InferenceError:
                             continue
     
                         if not isinstance(elt_name, astroid.Const) \
    -                             or not isinstance(elt_name.value, basestring):
    -                        self.add_message('invalid-all-object', args=elt.as_string(), node=elt)
    +                             or not isinstance(elt_name.value, six.string_types):
    +                        self.add_message('invalid-all-object',
    +                                         args=elt.as_string(), node=elt)
                             continue
                         elt_name = elt_name.value
                         # If elt is in not_consumed, remove it from not_consumed
    @@ -235,12 +364,12 @@ def leave_module(self, node):
                                 if os.path.basename(basename) == '__init__':
                                     name = node.name + "." + elt_name
                                     try:
    -                                    file_from_modpath(name.split("."))
    +                                    modutils.file_from_modpath(name.split("."))
                                     except ImportError:
                                         self.add_message('undefined-all-variable',
                                                          args=elt_name,
                                                          node=elt)
    -                                except SyntaxError, exc:
    +                                except SyntaxError:
                                         # don't yield an syntax-error warning,
                                         # because it will be later yielded
                                         # when the file will be checked
    @@ -248,19 +377,52 @@ def leave_module(self, node):
             # don't check unused imports in __init__ files
             if not self.config.init_import and node.package:
                 return
    -        for name, stmts in not_consumed.iteritems():
    -            if any(isinstance(stmt, astroid.AssName)
    -                   and isinstance(stmt.ass_type(), astroid.AugAssign)
    -                   for stmt in stmts):
    -                continue
    -            stmt = stmts[0]
    -            if isinstance(stmt, astroid.Import):
    -                self.add_message('unused-import', args=name, node=stmt)
    -            elif isinstance(stmt, astroid.From) and stmt.modname != '__future__':
    -                if stmt.names[0][0] == '*':
    -                    self.add_message('unused-wildcard-import', args=name, node=stmt)
    -                else:
    -                    self.add_message('unused-import', args=name, node=stmt)
    +
    +        self._check_imports(not_consumed)
    +
    +    def _check_imports(self, not_consumed):
    +        local_names = _fix_dot_imports(not_consumed)
    +        checked = set()
    +        for name, stmt in local_names:
    +            for imports in stmt.names:
    +                real_name = imported_name = imports[0]
    +                if imported_name == "*":
    +                    real_name = name
    +                as_name = imports[1]
    +                if real_name in checked:
    +                    continue
    +                if name not in (real_name, as_name):
    +                    continue
    +                checked.add(real_name)
    +
    +                if (isinstance(stmt, astroid.Import) or
    +                        (isinstance(stmt, astroid.From) and
    +                         not stmt.modname)):
    +                    if (isinstance(stmt, astroid.From) and
    +                            SPECIAL_OBJ.search(imported_name)):
    +                        # Filter special objects (__doc__, __all__) etc.,
    +                        # because they can be imported for exporting.
    +                        continue
    +                    if as_name is None:
    +                        msg = "import %s" % imported_name
    +                    else:
    +                        msg = "%s imported as %s" % (imported_name, as_name)
    +                    self.add_message('unused-import', args=msg, node=stmt)
    +                elif isinstance(stmt, astroid.From) and stmt.modname != '__future__':
    +                    if SPECIAL_OBJ.search(imported_name):
    +                        # Filter special objects (__doc__, __all__) etc.,
    +                        # because they can be imported for exporting.
    +                        continue
    +                    if imported_name == '*':
    +                        self.add_message('unused-wildcard-import',
    +                                         args=name, node=stmt)
    +                    else:
    +                        if as_name is None:
    +                            msg = "%s imported from %s" % (imported_name, stmt.modname)
    +                        else:
    +                            fields = (imported_name, stmt.modname, as_name)
    +                            msg = "%s imported from %s as %s" % fields
    +                        self.add_message('unused-import', args=msg, node=stmt)
             del self._to_consume
     
         def visit_class(self, node):
    @@ -352,10 +514,21 @@ def leave_function(self, node):
             klass = node.parent.frame()
             if is_method and (klass.type == 'interface' or node.is_abstract()):
                 return
    +        if is_method and isinstance(klass, astroid.Class):
    +            confidence = INFERENCE if has_known_bases(klass) else INFERENCE_FAILURE
    +        else:
    +            confidence = HIGH
             authorized_rgx = self.config.dummy_variables_rgx
             called_overridden = False
             argnames = node.argnames()
    -        for name, stmts in not_consumed.iteritems():
    +        global_names = set()
    +        nonlocal_names = set()
    +        for global_stmt in node.nodes_of_class(astroid.Global):
    +            global_names.update(set(global_stmt.names))
    +        for nonlocal_stmt in node.nodes_of_class(astroid.Nonlocal):
    +            nonlocal_names.update(set(nonlocal_stmt.names))
    +
    +        for name, stmts in six.iteritems(not_consumed):
                 # ignore some special names specified by user configuration
                 if authorized_rgx.match(name):
                     continue
    @@ -364,6 +537,23 @@ def leave_function(self, node):
                 stmt = stmts[0]
                 if isinstance(stmt, astroid.Global):
                     continue
    +            if isinstance(stmt, (astroid.Import, astroid.From)):
    +                # Detect imports, assigned to global statements.
    +                if global_names:
    +                    skip = False
    +                    for import_name, import_alias in stmt.names:
    +                        # If the import uses an alias, check only that.
    +                        # Otherwise, check only the import name.
    +                        if import_alias:
    +                            if import_alias in global_names:
    +                                skip = True
    +                                break
    +                        elif import_name in global_names:
    +                            skip = True
    +                            break
    +                    if skip:
    +                        continue
    +
                 # care about functions with unknown argument (builtins)
                 if name in argnames:
                     if is_method:
    @@ -378,11 +568,16 @@ def leave_function(self, node):
                             continue
                         if node.name in PYMETHODS and node.name not in ('__init__', '__new__'):
                             continue
    -                # don't check callback arguments XXX should be configurable
    -                if node.name.startswith('cb_') or node.name.endswith('_cb'):
    +                # don't check callback arguments
    +                if any(node.name.startswith(cb) or node.name.endswith(cb)
    +                       for cb in self.config.callbacks):
                         continue
    -                self.add_message('unused-argument', args=name, node=stmt)
    +                self.add_message('unused-argument', args=name, node=stmt,
    +                                 confidence=confidence)
                 else:
    +                if stmt.parent and isinstance(stmt.parent, astroid.Assign):
    +                    if name in nonlocal_names:
    +                        continue
                     self.add_message('unused-variable', args=name, node=stmt)
     
         @check_messages('global-variable-undefined', 'global-variable-not-assigned', 'global-statement',
    @@ -410,8 +605,9 @@ def visit_global(self, node):
                         # same scope level assignment
                         break
                 else:
    -                # global but no assignment
    -                self.add_message('global-variable-not-assigned', args=name, node=node)
    +                if not _find_frame_imports(name, frame):
    +                    self.add_message('global-variable-not-assigned',
    +                                     args=name, node=node)
                     default_message = False
                 if not assign_nodes:
                     continue
    @@ -429,10 +625,16 @@ def visit_global(self, node):
             if default_message:
                 self.add_message('global-statement', node=node)
     
    -    def _check_late_binding_closure(self, node, assignment_node, scope_type):
    +    def _check_late_binding_closure(self, node, assignment_node):
    +        def _is_direct_lambda_call():
    +            return (isinstance(node_scope.parent, astroid.CallFunc)
    +                    and node_scope.parent.func is node_scope)
    +
             node_scope = node.scope()
             if not isinstance(node_scope, (astroid.Lambda, astroid.Function)):
                 return
    +        if isinstance(node.parent, astroid.Arguments):
    +            return
     
             if isinstance(assignment_node, astroid.Comprehension):
                 if assignment_node.parent.parent_of(node.scope()):
    @@ -445,9 +647,11 @@ def _check_late_binding_closure(self, node, assignment_node, scope_type):
                         break
                     maybe_for = maybe_for.parent
                 else:
    -                if maybe_for.parent_of(node_scope) and not isinstance(node_scope.statement(), astroid.Return):
    +                if (maybe_for.parent_of(node_scope)
    +                        and not _is_direct_lambda_call()
    +                        and not isinstance(node_scope.statement(), astroid.Return)):
                         self.add_message('cell-var-from-loop', node=node, args=node.name)
    -        
    +
         def _loopvar_name(self, node, name):
             # filter variables according to node's scope
             # XXX used to filter parents but don't remember why, and removing this
    @@ -474,7 +678,7 @@ def _loopvar_name(self, node, name):
                 _astmts = astmts[:1]
             for i, stmt in enumerate(astmts[1:]):
                 if (astmts[i].statement().parent_of(stmt)
    -                and not in_for_else_branch(astmts[i].statement(), stmt)):
    +                    and not in_for_else_branch(astmts[i].statement(), stmt)):
                     continue
                 _astmts.append(stmt)
             astmts = _astmts
    @@ -514,7 +718,7 @@ def visit_name(self, node):
             # a decorator, then start from the parent frame of the function instead
             # of the function frame - and thus open an inner class scope
             if (is_func_default(node) or is_func_decorator(node)
    -            or is_ancestor_name(frame, node)):
    +                or is_ancestor_name(frame, node)):
                 start_index = len(self._to_consume) - 2
             else:
                 start_index = len(self._to_consume) - 1
    @@ -528,14 +732,37 @@ def visit_name(self, node):
                 # names. The only exception is when the starting scope is a
                 # comprehension and its direct outer scope is a class
                 if scope_type == 'class' and i != start_index and not (
    -                base_scope_type == 'comprehension' and i == start_index-1):
    -                # XXX find a way to handle class scope in a smoother way
    -                continue
    +                    base_scope_type == 'comprehension' and i == start_index-1):
    +                # Detect if we are in a local class scope, as an assignment.
    +                # For example, the following is fair game.
    +                #
    +                # class A:
    +                #    b = 1
    +                #    c = lambda b=b: b * b
    +                #
    +                # class B:
    +                #    tp = 1
    +                #    def func(self, arg: tp):
    +                #        ...
    +
    +                in_annotation = (
    +                    PY3K and isinstance(frame, astroid.Function)
    +                    and node.statement() is frame and
    +                    (node in frame.args.annotations
    +                     or node is frame.args.varargannotation
    +                     or node is frame.args.kwargannotation))
    +                if in_annotation:
    +                    frame_locals = frame.parent.scope().locals
    +                else:
    +                    frame_locals = frame.locals
    +                if not ((isinstance(frame, astroid.Class) or in_annotation)
    +                        and name in frame_locals):
    +                    continue
                 # the name has already been consumed, only check it's not a loop
                 # variable used outside the loop
                 if name in consumed:
                     defnode = assign_parent(consumed[name][0])
    -                self._check_late_binding_closure(node, defnode, scope_type)
    +                self._check_late_binding_closure(node, defnode)
                     self._loopvar_name(node, name)
                     break
                 # mark the name as consumed if it's defined in this scope
    @@ -547,12 +774,12 @@ def visit_name(self, node):
                 # checks for use before assignment
                 defnode = assign_parent(to_consume[name][0])
                 if defnode is not None:
    -                self._check_late_binding_closure(node, defnode, scope_type)
    +                self._check_late_binding_closure(node, defnode)
                     defstmt = defnode.statement()
                     defframe = defstmt.frame()
                     maybee0601 = True
                     if not frame is defframe:
    -                    maybee0601 = False
    +                    maybee0601 = _detect_global_scope(node, frame, defframe)
                     elif defframe.parent is None:
                         # we are at the module level, check the name is not
                         # defined in builtins
    @@ -569,16 +796,71 @@ def visit_name(self, node):
                                 maybee0601 = not any(isinstance(child, astroid.Nonlocal)
                                                      and name in child.names
                                                      for child in defframe.get_children())
    +
    +                # Handle a couple of class scoping issues.
    +                annotation_return = False
    +                # The class reuses itself in the class scope.
    +                recursive_klass = (frame is defframe and
    +                                   defframe.parent_of(node) and
    +                                   isinstance(defframe, astroid.Class) and
    +                                   node.name == defframe.name)
    +                if (self._to_consume[-1][-1] == 'lambda' and
    +                        isinstance(frame, astroid.Class)
    +                        and name in frame.locals):
    +                    maybee0601 = True
    +                elif (isinstance(defframe, astroid.Class) and
    +                      isinstance(frame, astroid.Function)):
    +                    # Special rule for function return annotations,
    +                    # which uses the same name as the class where
    +                    # the function lives.
    +                    if (PY3K and node is frame.returns and
    +                            defframe.parent_of(frame.returns)):
    +                        maybee0601 = annotation_return = True
    +
    +                    if (maybee0601 and defframe.name in defframe.locals and
    +                            defframe.locals[name][0].lineno < frame.lineno):
    +                        # Detect class assignments with the same
    +                        # name as the class. In this case, no warning
    +                        # should be raised.
    +                        maybee0601 = False
    +                elif recursive_klass:
    +                    maybee0601 = True
    +                else:
    +                    maybee0601 = maybee0601 and stmt.fromlineno <= defstmt.fromlineno
    +
                     if (maybee0601
    -                    and stmt.fromlineno <= defstmt.fromlineno
    -                    and not is_defined_before(node)
    -                    and not are_exclusive(stmt, defstmt, ('NameError', 'Exception', 'BaseException'))):
    -                    if defstmt is stmt and isinstance(node, (astroid.DelName,
    -                                                             astroid.AssName)):
    +                        and not is_defined_before(node)
    +                        and not are_exclusive(stmt, defstmt, ('NameError',
    +                                                              'Exception',
    +                                                              'BaseException'))):
    +                    if recursive_klass or (defstmt is stmt and
    +                                           isinstance(node, (astroid.DelName,
    +                                                             astroid.AssName))):
    +                        self.add_message('undefined-variable', args=name, node=node)
    +                    elif annotation_return:
                             self.add_message('undefined-variable', args=name, node=node)
                         elif self._to_consume[-1][-1] != 'lambda':
    -                        # E0601 may *not* occurs in lambda scope
    +                        # E0601 may *not* occurs in lambda scope.
                             self.add_message('used-before-assignment', args=name, node=node)
    +                    elif self._to_consume[-1][-1] == 'lambda':
    +                        # E0601 can occur in class-level scope in lambdas, as in
    +                        # the following example:
    +                        #   class A:
    +                        #      x = lambda attr: f + attr
    +                        #      f = 42
    +                        if isinstance(frame, astroid.Class) and name in frame.locals:
    +                            if isinstance(node.parent, astroid.Arguments):
    +                                # Doing the following is fine:
    +                                #   class A:
    +                                #      x = 42
    +                                #      y = lambda attr=x: attr
    +                                if stmt.fromlineno <= defstmt.fromlineno:
    +                                    self.add_message('used-before-assignment',
    +                                                     args=name, node=node)
    +                            else:
    +                                self.add_message('undefined-variable',
    +                                                 args=name, node=node)
    +
                 if isinstance(node, astroid.AssName): # Aug AssName
                     del consumed[name]
                 else:
    @@ -599,7 +881,7 @@ def visit_import(self, node):
             for name, _ in node.names:
                 parts = name.split('.')
                 try:
    -                module = node.infer_name_module(parts[0]).next()
    +                module = next(node.infer_name_module(parts[0]))
                 except astroid.ResolveError:
                     continue
                 self._check_module_attrs(node, module, parts[1:])
    @@ -611,10 +893,7 @@ def visit_from(self, node):
             level = getattr(node, 'level', None)
             try:
                 module = node.root().import_module(name_parts[0], level=level)
    -        except AstroidBuildingException:
    -            return
    -        except Exception, exc:
    -            print 'Unhandled exception in VariablesChecker:', exc
    +        except Exception: # pylint: disable=broad-except
                 return
             module = self._check_module_attrs(node, module, name_parts[1:])
             if not module:
    @@ -645,10 +924,19 @@ def _check_unpacking(self, infered, node, targets):
             """
             if infered is astroid.YES:
                 return
    +        if (isinstance(infered.parent, astroid.Arguments) and
    +                isinstance(node.value, astroid.Name) and
    +                node.value.name == infered.parent.vararg):
    +            # Variable-length argument, we can't determine the length.
    +            return
             if isinstance(infered, (astroid.Tuple, astroid.List)):
                 # attempt to check unpacking is properly balanced
                 values = infered.itered()
                 if len(targets) != len(values):
    +                # Check if we have starred nodes.
    +                if any(isinstance(target, astroid.Starred)
    +                       for target in targets):
    +                    return
                     self.add_message('unbalanced-tuple-unpacking', node=node,
                                      args=(_get_unpacking_extra_info(node, infered),
                                            len(targets),
    @@ -675,17 +963,22 @@ def _check_module_attrs(self, node, module, module_names):
             if the latest access name corresponds to a module, return it
             """
             assert isinstance(module, astroid.Module), module
    +        ignored_modules = get_global_option(self, 'ignored-modules',
    +                                            default=[])
             while module_names:
                 name = module_names.pop(0)
                 if name == '__dict__':
                     module = None
                     break
                 try:
    -                module = module.getattr(name)[0].infer().next()
    +                module = next(module.getattr(name)[0].infer())
                     if module is astroid.YES:
                         return None
                 except astroid.NotFoundError:
    -                self.add_message('no-name-in-module', args=(name, module.name), node=node)
    +                if module.name in ignored_modules:
    +                    return None
    +                self.add_message('no-name-in-module',
    +                                 args=(name, module.name), node=node)
                     return None
                 except astroid.InferenceError:
                     return None
    @@ -720,16 +1013,51 @@ def leave_module(self, node):
             """ Update consumption analysis variable
             for metaclasses.
             """
    +        module_locals = self._to_consume[0][0]
    +        module_imports = self._to_consume[0][1]
    +        consumed = {}
    +
             for klass in node.nodes_of_class(astroid.Class):
    -            if klass._metaclass:
    -                metaclass = klass.metaclass()
    -                module_locals = self._to_consume[0][0]
    +            found = metaclass = name = None
    +            if not klass._metaclass:
    +                # Skip if this class doesn't use
    +                # explictly a metaclass, but inherits it from ancestors
    +                continue
    +
    +            metaclass = klass.metaclass()
     
    +            # Look the name in the already found locals.
    +            # If it's not found there, look in the module locals
    +            # and in the imported modules.
    +            if isinstance(klass._metaclass, astroid.Name):
    +                name = klass._metaclass.name
    +            elif metaclass:
    +                # if it uses a `metaclass=module.Class`
    +                name = metaclass.root().name
    +
    +            if name:
    +                found = consumed.setdefault(
    +                    name, module_locals.get(name, module_imports.get(name)))
    +
    +            if found is None and not metaclass:
    +                name = None
                     if isinstance(klass._metaclass, astroid.Name):
    -                    module_locals.pop(klass._metaclass.name, None)
    -                if metaclass:                
    -                    # if it uses a `metaclass=module.Class`                            
    -                    module_locals.pop(metaclass.root().name, None)
    +                    name = klass._metaclass.name
    +                elif isinstance(klass._metaclass, astroid.Getattr):
    +                    name = klass._metaclass.as_string()
    +
    +                if name is not None:
    +                    if not (name in astroid.Module.scope_attrs or
    +                            is_builtin(name) or
    +                            name in self.config.additional_builtins or
    +                            name in node.locals):
    +                        self.add_message('undefined-variable',
    +                                         node=klass,
    +                                         args=(name, ))
    +        # Pop the consumed items, in order to
    +        # avoid having unused-import false positives
    +        for name in consumed:
    +            module_locals.pop(name, None)
             super(VariablesChecker3k, self).leave_module(node)
     
     if sys.version_info >= (3, 0):
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py b/pymode/libs/pylint/config.py
    similarity index 91%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/config.py
    rename to pymode/libs/pylint/config.py
    index 992c2934..ebfe5789 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/config.py
    +++ b/pymode/libs/pylint/config.py
    @@ -17,6 +17,7 @@
     * pylint.d (PYLINTHOME)
     """
     from __future__ import with_statement
    +from __future__ import print_function
     
     import pickle
     import os
    @@ -50,15 +51,15 @@ def load_results(base):
         """
         data_file = get_pdata_path(base, 1)
         try:
    -        with open(data_file) as stream:
    +        with open(data_file, _PICK_LOAD) as stream:
                 return pickle.load(stream)
    -    except:
    +    except Exception: # pylint: disable=broad-except
             return {}
     
     if sys.version_info < (3, 0):
    -    _PICK_MOD = 'w'
    +    _PICK_DUMP, _PICK_LOAD = 'w', 'r'
     else:
    -    _PICK_MOD = 'wb'
    +    _PICK_DUMP, _PICK_LOAD = 'wb', 'rb'
     
     def save_results(results, base):
         """pickle results"""
    @@ -66,13 +67,13 @@ def save_results(results, base):
             try:
                 os.mkdir(PYLINT_HOME)
             except OSError:
    -            print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME
    +            print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
         data_file = get_pdata_path(base, 1)
         try:
    -        with open(data_file, _PICK_MOD) as stream:
    +        with open(data_file, _PICK_DUMP) as stream:
                 pickle.dump(results, stream)
    -    except (IOError, OSError), ex:
    -        print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex)
    +    except (IOError, OSError) as ex:
    +        print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
     
     # location of the configuration file ##########################################
     
    diff --git a/pymode/libs/pylint/epylint.py b/pymode/libs/pylint/epylint.py
    new file mode 100644
    index 00000000..3d73ecd3
    --- /dev/null
    +++ b/pymode/libs/pylint/epylint.py
    @@ -0,0 +1,177 @@
    +# -*- coding: utf-8; mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4
    +# -*- vim:fenc=utf-8:ft=python:et:sw=4:ts=4:sts=4
    +# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
    +# http://www.logilab.fr/ -- mailto:contact@logilab.fr
    +#
    +# This program is free software; you can redistribute it and/or modify it under
    +# the terms of the GNU General Public License as published by the Free Software
    +# Foundation; either version 2 of the License, or (at your option) any later
    +# version.
    +#
    +# This program is distributed in the hope that it will be useful, but WITHOUT
    +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
    +#
    +# You should have received a copy of the GNU General Public License along with
    +# this program; if not, write to the Free Software Foundation, Inc.,
    +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    +"""Emacs and Flymake compatible Pylint.
    +
    +This script is for integration with emacs and is compatible with flymake mode.
    +
    +epylint walks out of python packages before invoking pylint. This avoids
    +reporting import errors that occur when a module within a package uses the
    +absolute import path to get another module within this package.
    +
    +For example:
    +    - Suppose a package is structured as
    +
    +        a/__init__.py
    +        a/b/x.py
    +        a/c/y.py
    +
    +   - Then if y.py imports x as "from a.b import x" the following produces pylint
    +     errors
    +
    +       cd a/c; pylint y.py
    +
    +   - The following obviously doesn't
    +
    +       pylint a/c/y.py
    +
    +   - As this script will be invoked by emacs within the directory of the file
    +     we are checking we need to go out of it to avoid these false positives.
    +
    +
    +You may also use py_run to run pylint with desired options and get back (or not)
    +its output.
    +"""
    +from __future__ import print_function
    +
    +import sys, os
    +import os.path as osp
    +from subprocess import Popen, PIPE
    +
    +def _get_env():
    +    '''Extracts the environment PYTHONPATH and appends the current sys.path to
    +    those.'''
    +    env = dict(os.environ)
    +    env['PYTHONPATH'] = os.pathsep.join(sys.path)
    +    return env
    +
    +def lint(filename, options=None):
    +    """Pylint the given file.
    +
    +    When run from emacs we will be in the directory of a file, and passed its
    +    filename.  If this file is part of a package and is trying to import other
    +    modules from within its own package or another package rooted in a directory
    +    below it, pylint will classify it as a failed import.
    +
    +    To get around this, we traverse down the directory tree to find the root of
    +    the package this module is in.  We then invoke pylint from this directory.
    +
    +    Finally, we must correct the filenames in the output generated by pylint so
    +    Emacs doesn't become confused (it will expect just the original filename,
    +    while pylint may extend it with extra directories if we've traversed down
    +    the tree)
    +    """
    +    # traverse downwards until we are out of a python package
    +    full_path = osp.abspath(filename)
    +    parent_path = osp.dirname(full_path)
    +    child_path = osp.basename(full_path)
    +
    +    while parent_path != "/" and osp.exists(osp.join(parent_path, '__init__.py')):
    +        child_path = osp.join(osp.basename(parent_path), child_path)
    +        parent_path = osp.dirname(parent_path)
    +
    +    # Start pylint
    +    # Ensure we use the python and pylint associated with the running epylint
    +    from pylint import lint as lint_mod
    +    lint_path = lint_mod.__file__
    +    options = options or ['--disable=C,R,I']
    +    cmd = [sys.executable, lint_path] + options + [
    +        '--msg-template', '{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}',
    +        '-r', 'n', child_path]
    +    process = Popen(cmd, stdout=PIPE, cwd=parent_path, env=_get_env(),
    +                    universal_newlines=True)
    +
    +    for line in process.stdout:
    +        # remove pylintrc warning
    +        if line.startswith("No config file found"):
    +            continue
    +
    +        # modify the file name that's output to reverse the path traversal we made
    +        parts = line.split(":")
    +        if parts and parts[0] == child_path:
    +            line = ":".join([filename] + parts[1:])
    +        print(line, end=' ')
    +
    +    process.wait()
    +    return process.returncode
    +
    +
    +def py_run(command_options='', return_std=False, stdout=None, stderr=None,
    +           script='epylint'):
    +    """Run pylint from python
    +
    +    ``command_options`` is a string containing ``pylint`` command line options;
    +    ``return_std`` (boolean) indicates return of created standard output
    +    and error (see below);
    +    ``stdout`` and ``stderr`` are 'file-like' objects in which standard output
    +    could be written.
    +
    +    Calling agent is responsible for stdout/err management (creation, close).
    +    Default standard output and error are those from sys,
    +    or standalone ones (``subprocess.PIPE``) are used
    +    if they are not set and ``return_std``.
    +
    +    If ``return_std`` is set to ``True``, this function returns a 2-tuple
    +    containing standard output and error related to created process,
    +    as follows: ``(stdout, stderr)``.
    +
    +    A trivial usage could be as follows:
    +        >>> py_run( '--version')
    +        No config file found, using default configuration
    +        pylint 0.18.1,
    +            ...
    +
    +    To silently run Pylint on a module, and get its standard output and error:
    +        >>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True)
    +    """
    +    # Create command line to call pylint
    +    if os.name == 'nt':
    +        script += '.bat'
    +    command_line = script + ' ' + command_options
    +    # Providing standard output and/or error if not set
    +    if stdout is None:
    +        if return_std:
    +            stdout = PIPE
    +        else:
    +            stdout = sys.stdout
    +    if stderr is None:
    +        if return_std:
    +            stderr = PIPE
    +        else:
    +            stderr = sys.stderr
    +    # Call pylint in a subprocess
    +    p = Popen(command_line, shell=True, stdout=stdout, stderr=stderr,
    +              env=_get_env(), universal_newlines=True)
    +    p.wait()
    +    # Return standard output and error
    +    if return_std:
    +        return (p.stdout, p.stderr)
    +
    +
    +def Run():
    +    if len(sys.argv) == 1:
    +        print("Usage: %s  [options]" % sys.argv[0])
    +        sys.exit(1)
    +    elif not osp.exists(sys.argv[1]):
    +        print("%s does not exist" % sys.argv[1])
    +        sys.exit(1)
    +    else:
    +        sys.exit(lint(sys.argv[1], sys.argv[2:]))
    +
    +
    +if __name__ == '__main__':
    +    Run()
    diff --git a/pymode/libs/pylint/gui.py b/pymode/libs/pylint/gui.py
    new file mode 100644
    index 00000000..8327e0ec
    --- /dev/null
    +++ b/pymode/libs/pylint/gui.py
    @@ -0,0 +1,531 @@
    +# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
    +# http://www.logilab.fr/ -- mailto:contact@logilab.fr
    +#
    +# This program is free software; you can redistribute it and/or modify it under
    +# the terms of the GNU General Public License as published by the Free Software
    +# Foundation; either version 2 of the License, or (at your option) any later
    +# version.
    +#
    +# This program is distributed in the hope that it will be useful, but WITHOUT
    +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
    +#
    +# You should have received a copy of the GNU General Public License along with
    +# this program; if not, write to the Free Software Foundation, Inc.,
    +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    +"""Tkinker gui for pylint"""
    +from __future__ import print_function
    +
    +import os
    +import sys
    +import re
    +from threading import Thread
    +
    +import six
    +
    +from six.moves.tkinter import (
    +    Tk, Frame, Listbox, Entry, Label, Button, Scrollbar,
    +    Checkbutton, Radiobutton, IntVar, StringVar, PanedWindow,
    +    TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH, SUNKEN, W,
    +    HORIZONTAL, DISABLED, NORMAL, W,
    +)
    +from six.moves.tkinter_tkfiledialog import (
    +    askopenfilename, askdirectory,
    +)
    +
    +import pylint.lint
    +from pylint.reporters.guireporter import GUIReporter
    +
    +HOME = os.path.expanduser('~/')
    +HISTORY = '.pylint-gui-history'
    +COLORS = {'(I)':'green',
    +          '(C)':'blue', '(R)':'darkblue',
    +          '(W)':'black', '(E)':'darkred',
    +          '(F)':'red'}
    +
    +
    +def convert_to_string(msg):
    +    """make a string representation of a message"""
    +    module_object = msg.module
    +    if msg.obj:
    +        module_object += ".%s" % msg.obj
    +    return "(%s) %s [%d]: %s" % (msg.C, module_object, msg.line, msg.msg)
    +
    +class BasicStream(object):
    +    '''
    +    Stream used by the GUI reporter: instead of writing to stdout, output
    +    is written to this stream and saved in ``contents``.
    +    '''
    +    def __init__(self, gui):
    +        """init"""
    +        self.curline = ""
    +        self.gui = gui
    +        self.contents = []
    +        self.outdict = {}
    +        self.currout = None
    +        self.next_title = None
    +
    +    def write(self, text):
    +        """write text to the stream"""
    +        if re.match('^--+$', text.strip()) or re.match('^==+$', text.strip()):
    +            if self.currout:
    +                self.outdict[self.currout].remove(self.next_title)
    +                self.outdict[self.currout].pop()
    +            self.currout = self.next_title
    +            self.outdict[self.currout] = ['']
    +
    +        if text.strip():
    +            self.next_title = text.strip()
    +
    +        if text.startswith(os.linesep):
    +            self.contents.append('')
    +            if self.currout:
    +                self.outdict[self.currout].append('')
    +        self.contents[-1] += text.strip(os.linesep)
    +        if self.currout:
    +            self.outdict[self.currout][-1] += text.strip(os.linesep)
    +        if text.endswith(os.linesep) and text.strip():
    +            self.contents.append('')
    +            if self.currout:
    +                self.outdict[self.currout].append('')
    +
    +    def fix_contents(self):
    +        """finalize what the contents of the dict should look like before output"""
    +        for item in self.outdict:
    +            num_empty = self.outdict[item].count('')
    +            for _ in range(num_empty):
    +                self.outdict[item].remove('')
    +            if self.outdict[item]:
    +                self.outdict[item].pop(0)
    +
    +    def output_contents(self):
    +        """output contents of dict to the gui, and set the rating"""
    +        self.fix_contents()
    +        self.gui.tabs = self.outdict
    +        try:
    +            self.gui.rating.set(self.outdict['Global evaluation'][0])
    +        except KeyError:
    +            self.gui.rating.set('Error')
    +        self.gui.refresh_results_window()
    +
    +        #reset stream variables for next run
    +        self.contents = []
    +        self.outdict = {}
    +        self.currout = None
    +        self.next_title = None
    +
    +
    +class LintGui(object):
    +    """Build and control a window to interact with pylint"""
    +
    +    def __init__(self, root=None):
    +        """init"""
    +        self.root = root or Tk()
    +        self.root.title('Pylint')
    +        #reporter
    +        self.reporter = None
    +        #message queue for output from reporter
    +        self.msg_queue = six.moves.queue.Queue()
    +        self.msgs = []
    +        self.visible_msgs = []
    +        self.filenames = []
    +        self.rating = StringVar()
    +        self.tabs = {}
    +        self.report_stream = BasicStream(self)
    +        #gui objects
    +        self.lb_messages = None
    +        self.showhistory = None
    +        self.results = None
    +        self.btnRun = None
    +        self.information_box = None
    +        self.convention_box = None
    +        self.refactor_box = None
    +        self.warning_box = None
    +        self.error_box = None
    +        self.fatal_box = None
    +        self.txtModule = None
    +        self.status = None
    +        self.msg_type_dict = None
    +        self.init_gui()
    +
    +    def init_gui(self):
    +        """init helper"""
    +
    +        window = PanedWindow(self.root, orient="vertical")
    +        window.pack(side=TOP, fill=BOTH, expand=True)
    +
    +        top_pane = Frame(window)
    +        window.add(top_pane)
    +        mid_pane = Frame(window)
    +        window.add(mid_pane)
    +        bottom_pane = Frame(window)
    +        window.add(bottom_pane)
    +
    +        #setting up frames
    +        top_frame = Frame(top_pane)
    +        mid_frame = Frame(top_pane)
    +        history_frame = Frame(top_pane)
    +        radio_frame = Frame(mid_pane)
    +        rating_frame = Frame(mid_pane)
    +        res_frame = Frame(mid_pane)
    +        check_frame = Frame(bottom_pane)
    +        msg_frame = Frame(bottom_pane)
    +        btn_frame = Frame(bottom_pane)
    +        top_frame.pack(side=TOP, fill=X)
    +        mid_frame.pack(side=TOP, fill=X)
    +        history_frame.pack(side=TOP, fill=BOTH, expand=True)
    +        radio_frame.pack(side=TOP, fill=X)
    +        rating_frame.pack(side=TOP, fill=X)
    +        res_frame.pack(side=TOP, fill=BOTH, expand=True)
    +        check_frame.pack(side=TOP, fill=X)
    +        msg_frame.pack(side=TOP, fill=BOTH, expand=True)
    +        btn_frame.pack(side=TOP, fill=X)
    +
    +        # Binding F5 application-wide to run lint
    +        self.root.bind('<F5>', self.run_lint)
    +
    +        #Message ListBox
    +        rightscrollbar = Scrollbar(msg_frame)
    +        rightscrollbar.pack(side=RIGHT, fill=Y)
    +        bottomscrollbar = Scrollbar(msg_frame, orient=HORIZONTAL)
    +        bottomscrollbar.pack(side=BOTTOM, fill=X)
    +        self.lb_messages = Listbox(
    +            msg_frame,
    +            yscrollcommand=rightscrollbar.set,
    +            xscrollcommand=bottomscrollbar.set,
    +            bg="white")
    +        self.lb_messages.bind("", self.show_sourcefile)
    +        self.lb_messages.pack(expand=True, fill=BOTH)
    +        rightscrollbar.config(command=self.lb_messages.yview)
    +        bottomscrollbar.config(command=self.lb_messages.xview)
    +
    +        #History ListBoxes
    +        rightscrollbar2 = Scrollbar(history_frame)
    +        rightscrollbar2.pack(side=RIGHT, fill=Y)
    +        bottomscrollbar2 = Scrollbar(history_frame, orient=HORIZONTAL)
    +        bottomscrollbar2.pack(side=BOTTOM, fill=X)
    +        self.showhistory = Listbox(
    +            history_frame,
    +            yscrollcommand=rightscrollbar2.set,
    +            xscrollcommand=bottomscrollbar2.set,
    +            bg="white")
    +        self.showhistory.pack(expand=True, fill=BOTH)
    +        rightscrollbar2.config(command=self.showhistory.yview)
    +        bottomscrollbar2.config(command=self.showhistory.xview)
    +        self.showhistory.bind('<Double-Button-1>', self.select_recent_file)
    +        self.set_history_window()
    +
    +        #status bar
    +        self.status = Label(self.root, text="", bd=1, relief=SUNKEN, anchor=W)
    +        self.status.pack(side=BOTTOM, fill=X)
    +
    +        #rating labels
    +        lbl_rating_label = Label(rating_frame, text='Rating:')
    +        lbl_rating_label.pack(side=LEFT)
    +        lbl_rating = Label(rating_frame, textvariable=self.rating)
    +        lbl_rating.pack(side=LEFT)
    +        Label(mid_frame, text='Recently Used:').pack(side=LEFT)
    +        Label(top_frame, text='Module or package').pack(side=LEFT)
    +
    +        #file textbox
    +        self.txt_module = Entry(top_frame, background='white')
    +        self.txt_module.bind('<Return>', self.run_lint)
    +        self.txt_module.pack(side=LEFT, expand=True, fill=X)
    +
    +        #results box
    +        rightscrollbar = Scrollbar(res_frame)
    +        rightscrollbar.pack(side=RIGHT, fill=Y)
    +        bottomscrollbar = Scrollbar(res_frame, orient=HORIZONTAL)
    +        bottomscrollbar.pack(side=BOTTOM, fill=X)
    +        self.results = Listbox(
    +            res_frame,
    +            yscrollcommand=rightscrollbar.set,
    +            xscrollcommand=bottomscrollbar.set,
    +            bg="white", font="Courier")
    +        self.results.pack(expand=True, fill=BOTH, side=BOTTOM)
    +        rightscrollbar.config(command=self.results.yview)
    +        bottomscrollbar.config(command=self.results.xview)
    +
    +        #buttons
    +        Button(top_frame, text='Open', command=self.file_open).pack(side=LEFT)
    +        Button(top_frame, text='Open Package',
    +               command=(lambda: self.file_open(package=True))).pack(side=LEFT)
    +
    +        self.btnRun = Button(top_frame, text='Run', command=self.run_lint)
    +        self.btnRun.pack(side=LEFT)
    +        Button(btn_frame, text='Quit', command=self.quit).pack(side=BOTTOM)
    +
    +        #radio buttons
    +        self.information_box = IntVar()
    +        self.convention_box = IntVar()
    +        self.refactor_box = IntVar()
    +        self.warning_box = IntVar()
    +        self.error_box = IntVar()
    +        self.fatal_box = IntVar()
    +        i = Checkbutton(check_frame, text="Information", fg=COLORS['(I)'],
    +                        variable=self.information_box, command=self.refresh_msg_window)
    +        c = Checkbutton(check_frame, text="Convention", fg=COLORS['(C)'],
    +                        variable=self.convention_box, command=self.refresh_msg_window)
    +        r = Checkbutton(check_frame, text="Refactor", fg=COLORS['(R)'],
    +                        variable=self.refactor_box, command=self.refresh_msg_window)
    +        w = Checkbutton(check_frame, text="Warning", fg=COLORS['(W)'],
    +                        variable=self.warning_box, command=self.refresh_msg_window)
    +        e = Checkbutton(check_frame, text="Error", fg=COLORS['(E)'],
    +                        variable=self.error_box, command=self.refresh_msg_window)
    +        f = Checkbutton(check_frame, text="Fatal", fg=COLORS['(F)'],
    +                        variable=self.fatal_box, command=self.refresh_msg_window)
    +        i.select()
    +        c.select()
    +        r.select()
    +        w.select()
    +        e.select()
    +        f.select()
    +        i.pack(side=LEFT)
    +        c.pack(side=LEFT)
    +        r.pack(side=LEFT)
    +        w.pack(side=LEFT)
    +        e.pack(side=LEFT)
    +        f.pack(side=LEFT)
    +
    +        #check boxes
    +        self.box = StringVar()
    +        # XXX should be generated
    +        report = Radiobutton(
    +            radio_frame, text="Report", variable=self.box,
    +            value="Report", command=self.refresh_results_window)
    +        raw_met = Radiobutton(
    +            radio_frame, text="Raw metrics", variable=self.box,
    +            value="Raw metrics", command=self.refresh_results_window)
    +        dup = Radiobutton(
    +            radio_frame, text="Duplication", variable=self.box,
    +            value="Duplication", command=self.refresh_results_window)
    +        ext = Radiobutton(
    +            radio_frame, text="External dependencies",
    +            variable=self.box, value="External dependencies",
    +            command=self.refresh_results_window)
    +        stat = Radiobutton(
    +            radio_frame, text="Statistics by type",
    +            variable=self.box, value="Statistics by type",
    +            command=self.refresh_results_window)
    +        msg_cat = Radiobutton(
    +            radio_frame, text="Messages by category",
    +            variable=self.box, value="Messages by category",
    +            command=self.refresh_results_window)
    +        msg = Radiobutton(
    +            radio_frame, text="Messages", variable=self.box,
    +            value="Messages", command=self.refresh_results_window)
    +        source_file = Radiobutton(
    +            radio_frame, text="Source File", variable=self.box,
    +            value="Source File", command=self.refresh_results_window)
    +        report.select()
    +        report.grid(column=0, row=0, sticky=W)
    +        raw_met.grid(column=1, row=0, sticky=W)
    +        dup.grid(column=2, row=0, sticky=W)
    +        msg.grid(column=3, row=0, sticky=W)
    +        stat.grid(column=0, row=1, sticky=W)
    +        msg_cat.grid(column=1, row=1, sticky=W)
    +        ext.grid(column=2, row=1, sticky=W)
    +        source_file.grid(column=3, row=1, sticky=W)
    +
    +        #dictionary for check boxes and associated error term
    +        self.msg_type_dict = {
    +            'I': lambda: self.information_box.get() == 1,
    +            'C': lambda: self.convention_box.get() == 1,
    +            'R': lambda: self.refactor_box.get() == 1,
    +            'E': lambda: self.error_box.get() == 1,
    +            'W': lambda: self.warning_box.get() == 1,
    +            'F': lambda: self.fatal_box.get() == 1
    +        }
    +        self.txt_module.focus_set()
    +
    +
    +    def select_recent_file(self, event): # pylint: disable=unused-argument
    +        """adds the selected file in the history listbox to the Module box"""
    +        if not self.showhistory.size():
    +            return
    +
    +        selected = self.showhistory.curselection()
    +        item = self.showhistory.get(selected)
    +        #update module
    +        self.txt_module.delete(0, END)
    +        self.txt_module.insert(0, item)
    +
    +    def refresh_msg_window(self):
    +        """refresh the message window with current output"""
    +        #clear the window
    +        self.lb_messages.delete(0, END)
    +        self.visible_msgs = []
    +        for msg in self.msgs:
    +            if self.msg_type_dict.get(msg.C)():
    +                self.visible_msgs.append(msg)
    +                msg_str = convert_to_string(msg)
    +                self.lb_messages.insert(END, msg_str)
    +                fg_color = COLORS.get(msg_str[:3], 'black')
    +                self.lb_messages.itemconfigure(END, fg=fg_color)
    +
    +    def refresh_results_window(self):
    +        """refresh the results window with current output"""
    +        #clear the window
    +        self.results.delete(0, END)
    +        try:
    +            for res in self.tabs[self.box.get()]:
    +                self.results.insert(END, res)
    +        except KeyError:
    +            pass
    +
    +    def process_incoming(self):
    +        """process the incoming messages from running pylint"""
    +        while self.msg_queue.qsize():
    +            try:
    +                msg = self.msg_queue.get(0)
    +                if msg == "DONE":
    +                    self.report_stream.output_contents()
    +                    return False
    +
    +                #adding message to list of msgs
    +                self.msgs.append(msg)
    +
    +                #displaying msg if message type is selected in check box
    +                if self.msg_type_dict.get(msg.C)():
    +                    self.visible_msgs.append(msg)
    +                    msg_str = convert_to_string(msg)
    +                    self.lb_messages.insert(END, msg_str)
    +                    fg_color = COLORS.get(msg_str[:3], 'black')
    +                    self.lb_messages.itemconfigure(END, fg=fg_color)
    +
    +            except six.moves.queue.Empty:
    +                pass
    +        return True
    +
    +    def periodic_call(self):
    +        """determine when to unlock the run button"""
    +        if self.process_incoming():
    +            self.root.after(100, self.periodic_call)
    +        else:
    +            #enabling button so it can be run again
    +            self.btnRun.config(state=NORMAL)
    +
    +    def mainloop(self):
    +        """launch the mainloop of the application"""
    +        self.root.mainloop()
    +
    +    def quit(self, _=None):
    +        """quit the application"""
    +        self.root.quit()
    +
    +    def halt(self): # pylint: disable=no-self-use
    +        """program halt placeholder"""
    +        return
    +
    +    def file_open(self, package=False, _=None):
    +        """launch a file browser"""
    +        if not package:
    +            filename = askopenfilename(parent=self.root,
    +                                       filetypes=[('pythonfiles', '*.py'),
    +                                                  ('allfiles', '*')],
    +                                       title='Select Module')
    +        else:
    +            filename = askdirectory(title="Select A Folder", mustexist=1)
    +
    +        if filename == ():
    +            return
    +
    +        self.txt_module.delete(0, END)
    +        self.txt_module.insert(0, filename)
    +
    +    def update_filenames(self):
    +        """update the list of recent filenames"""
    +        filename = self.txt_module.get()
    +        if not filename:
    +            filename = os.getcwd()
    +        if filename+'\n' in self.filenames:
    +            index = self.filenames.index(filename+'\n')
    +            self.filenames.pop(index)
    +
    +        #ensure only 10 most recent are stored
    +        if len(self.filenames) == 10:
    +            self.filenames.pop()
    +        self.filenames.insert(0, filename+'\n')
    +
    +    def set_history_window(self):
    +        """update the history window with info from the history file"""
    +        #clear the window
    +        self.showhistory.delete(0, END)
    +        # keep the last 10 most recent files
    +        try:
    +            view_history = open(HOME+HISTORY, 'r')
    +            for hist in view_history.readlines():
    +                if not hist in self.filenames:
    +                    self.filenames.append(hist)
    +                self.showhistory.insert(END, hist.split('\n')[0])
    +            view_history.close()
    +        except IOError:
    +            # do nothing since history file will be created later
    +            return
    +
    +    def run_lint(self, _=None):
    +        """launches pylint"""
    +        self.update_filenames()
    +        self.root.configure(cursor='watch')
    +        self.reporter = GUIReporter(self, output=self.report_stream)
    +        module = self.txt_module.get()
    +        if not module:
    +            module = os.getcwd()
    +
    +        #cleaning up msgs and windows
    +        self.msgs = []
    +        self.visible_msgs = []
    +        self.lb_messages.delete(0, END)
    +        self.tabs = {}
    +        self.results.delete(0, END)
    +        self.btnRun.config(state=DISABLED)
    +
    +        #setting up a worker thread to run pylint
    +        worker = Thread(target=lint_thread, args=(module, self.reporter, self,))
    +        self.periodic_call()
    +        worker.start()
    +
    +        # Overwrite the .pylint-gui-history file with all the new recently added files
    +        # in order from filenames but only save last 10 files
    +        write_history = open(HOME+HISTORY, 'w')
    +        write_history.writelines(self.filenames)
    +        write_history.close()
    +        self.set_history_window()
    +
    +        self.root.configure(cursor='')
    +
    +    def show_sourcefile(self, event=None):  # pylint: disable=unused-argument
    +        selected = self.lb_messages.curselection()
    +        if not selected:
    +            return
    +
    +        msg = self.visible_msgs[int(selected[0])]
    +        scroll = msg.line - 3
    +        if scroll < 0:
    +            scroll = 0
    +
    +        self.tabs["Source File"] = open(msg.path, "r").readlines()
    +        self.box.set("Source File")
    +        self.refresh_results_window()
    +        self.results.yview(scroll)
    +        self.results.select_set(msg.line - 1)
    +
    +
    +def lint_thread(module, reporter, gui):
    +    """thread for pylint"""
    +    gui.status.text = "processing module(s)"
    +    pylint.lint.Run(args=[module], reporter=reporter, exit=False)
    +    gui.msg_queue.put("DONE")
    +
    +
    +def Run(args):
    +    """launch pylint gui from args"""
    +    if args:
    +        print('USAGE: pylint-gui\n launch a simple pylint gui using Tk')
    +        sys.exit(1)
    +    gui = LintGui()
    +    gui.mainloop()
    +    sys.exit(0)
    +
    +if __name__ == '__main__':
    +    Run(sys.argv[1:])
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py b/pymode/libs/pylint/interfaces.py
    similarity index 76%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py
    rename to pymode/libs/pylint/interfaces.py
    index 50f2c839..64f5a956 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/interfaces.py
    +++ b/pymode/libs/pylint/interfaces.py
    @@ -10,10 +10,22 @@
     # You should have received a copy of the GNU General Public License along with
     # this program; if not, write to the Free Software Foundation, Inc.,
     # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
    -"""Interfaces for PyLint objects"""
    +"""Interfaces for Pylint objects"""
    +from collections import namedtuple
     
     from logilab.common.interface import Interface
     
    +Confidence = namedtuple('Confidence', ['name', 'description'])
    +# Warning Certainties
    +HIGH = Confidence('HIGH', 'No false positive possible.')
    +INFERENCE = Confidence('INFERENCE', 'Warning based on inference result.')
    +INFERENCE_FAILURE = Confidence('INFERENCE_FAILURE',
    +                               'Warning based on inference with failures.')
    +UNDEFINED = Confidence('UNDEFINED',
    +                       'Warning without any associated confidence level.')
    +
    +CONFIDENCE_LEVELS = [HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
    +
     
     class IChecker(Interface):
         """This is an base interface, not designed to be used elsewhere than for
    @@ -34,7 +46,7 @@ class IRawChecker(IChecker):
         def process_module(self, astroid):
             """ process a module
     
    -        the module's content is accessible via astroid.file_stream
    +        the module's content is accessible via astroid.stream
             """
     
     
    diff --git a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py b/pymode/libs/pylint/lint.py
    similarity index 56%
    rename from pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py
    rename to pymode/libs/pylint/lint.py
    index 529fbd44..01fc2f5d 100644
    --- a/pymode/libs/pylama/lint/pylama_pylint/pylint/lint.py
    +++ b/pymode/libs/pylint/lint.py
    @@ -25,46 +25,67 @@
     
       Display help messages about given message identifiers and exit.
     """
    +from __future__ import print_function
     
    -# import this first to avoid builtin namespace pollution
    -from pylint.checkers import utils
    -
    -import functools
    -import sys
    +import collections
    +import contextlib
    +import itertools
    +import operator
     import os
    +try:
    +    import multiprocessing
    +except ImportError:
    +    multiprocessing = None
    +import sys
     import tokenize
    -from warnings import warn
    +import warnings
     
    -from logilab.common.configuration import UnsupportedAction, OptionsManagerMixIn
    -from logilab.common.optik_ext import check_csv
    -from logilab.common.modutils import load_module_from_name, get_module_part
    -from logilab.common.interface import implements
    -from logilab.common.textutils import splitstrip, unquote
    -from logilab.common.ureports import Table, Text, Section
    -from logilab.common.__pkginfo__ import version as common_version
    -
    -from astroid import MANAGER, nodes, AstroidBuildingException
    +import astroid
     from astroid.__pkginfo__ import version as astroid_version
    -
    -from pylint.utils import (
    -    MSG_TYPES, OPTION_RGX,
    -    PyLintASTWalker, UnknownMessage, MessagesHandlerMixIn, ReportsHandlerMixIn,
    -    EmptyReport, WarningScope,
    -    expand_modules, tokenize_module)
    -from pylint.interfaces import IRawChecker, ITokenChecker, IAstroidChecker
    -from pylint.checkers import (BaseTokenChecker,
    -                             table_lines_from_stats,
    -                             initialize as checkers_initialize)
    -from pylint.reporters import initialize as reporters_initialize
    +from astroid import modutils
    +from logilab.common import configuration
    +from logilab.common import optik_ext
    +from logilab.common import interface
    +from logilab.common import textutils
    +from logilab.common import ureports
    +from logilab.common import __version__ as common_version
    +import six
    +
    +from pylint import checkers
    +from pylint import interfaces
    +from pylint import reporters
    +from pylint import utils
     from pylint import config
    -
     from pylint.__pkginfo__ import version
     
     
    +MANAGER = astroid.MANAGER
    +INCLUDE_IDS_HELP = ("Deprecated. It was used to include message\'s "
    +                    "id in output. Use --msg-template instead.")
    +SYMBOLS_HELP = ("Deprecated. It was used to include symbolic ids of "
    +                "messages in output. Use --msg-template instead.")
    +
    +def _get_new_args(message):
    +    location = (
    +        message.abspath,
    +        message.path,
    +        message.module,
    +        message.obj,
    +        message.line,
    +        message.column,
    +    )
    +    return (
    +        message.msg_id,
    +        message.symbol,
    +        location,
    +        message.msg,
    +        message.confidence,
    +    )
     
     def _get_python_path(filepath):
    -    dirname = os.path.dirname(os.path.realpath(
    -            os.path.expanduser(filepath)))
    +    dirname = os.path.realpath(os.path.expanduser(filepath))
    +    if not os.path.isdir(dirname):
    +        dirname = os.path.dirname(dirname)
         while True:
             if not os.path.exists(os.path.join(dirname, "__init__.py")):
                 return dirname
    @@ -74,6 +95,38 @@ def _get_python_path(filepath):
                 return os.getcwd()
     
     
    +def _merge_stats(stats):
    +    merged = {}
    +    for stat in stats:
    +        for key, item in six.iteritems(stat):
    +            if key not in merged:
    +                merged[key] = item
    +            else:
    +                if isinstance(item, dict):
    +                    merged[key].update(item)
    +                else:
    +                    merged[key] = merged[key] + item
    +    return merged
    +
    +
    +@contextlib.contextmanager
    +def _patch_sysmodules():
    +    # Context manager that permits running pylint, on Windows, with -m switch
    +    # and with --jobs, as in 'python -2 -m pylint .. --jobs'.
    +    # For more details why this is needed,
    +    # see Python issue http://bugs.python.org/issue10845.
    +
    +    mock_main = __name__ != '__main__' # -m switch
    +    if mock_main:
    +        sys.modules['__main__'] = sys.modules[__name__]
    +
    +    try:
    +        yield
    +    finally:
    +        if mock_main:
    +            sys.modules.pop('__main__')
    +
    +
     # Python Linter class #########################################################
     
     MSGS = {
    @@ -93,7 +146,7 @@ def _get_python_path(filepath):
         'F0010': ('error while code parsing: %s',
                   'parse-error',
                   'Used when an exception occured while building the Astroid '
    -               'representation which could be handled by astroid.'),
    +              'representation which could be handled by astroid.'),
     
         'I0001': ('Unable to run raw checkers on built-in module %s',
                   'raw-checker-failed',
    @@ -130,7 +183,7 @@ def _get_python_path(filepath):
                   'deprecated-pragma',
                   'Some inline pylint options have been renamed or reworked, '
                   'only the most recent form should be used. '
    -              'NOTE:skip-all is only available with pylint >= 0.26', 
    +              'NOTE:skip-all is only available with pylint >= 0.26',
                   {'old_names': [('I0014', 'deprecated-disable-all')]}),
     
         'E0001': ('%s',
    @@ -146,15 +199,63 @@ def _get_python_path(filepath):
         }
     
     
    -def _deprecated_option(shortname, opt_type):
    -    def _warn_deprecated(option, optname, *args):
    +def _deprecated_option(shortname, opt_type, help_msg):
    +    def _warn_deprecated(option, optname, *args): # pylint: disable=unused-argument
             sys.stderr.write('Warning: option %s is deprecated and ignored.\n' % (optname,))
    -    return {'short': shortname, 'help': 'DEPRECATED', 'hide': True,
    +    return {'short': shortname, 'help': help_msg, 'hide': True,
                 'type': opt_type, 'action': 'callback', 'callback': _warn_deprecated}
     
     
    -class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
    -               BaseTokenChecker):
    +if multiprocessing is not None:
    +    class ChildLinter(multiprocessing.Process): # pylint: disable=no-member
    +        def run(self):
    +            tasks_queue, results_queue, self._config = self._args # pylint: disable=no-member
    +
    +            self._config["jobs"] = 1  # Child does not parallelize any further.
    +            self._python3_porting_mode = self._config.pop(
    +                'python3_porting_mode', None)
    +
    +            # Run linter for received files/modules.
    +            for file_or_module in iter(tasks_queue.get, 'STOP'):
    +                result = self._run_linter(file_or_module[0])
    +                try:
    +                    results_queue.put(result)
    +                except Exception as ex:
    +                    print("internal error with sending report for module %s" %
    +                          file_or_module, file=sys.stderr)
    +                    print(ex, file=sys.stderr)
    +                    results_queue.put({})
    +
    +        def _run_linter(self, file_or_module):
    +            linter = PyLinter()
    +
    +            # Register standard checkers.
    +            linter.load_default_plugins()
    +            # Load command line plugins.
    +            # TODO linter.load_plugin_modules(self._plugins)
    +
    +            linter.load_configuration(**self._config)
    +            linter.set_reporter(reporters.CollectingReporter())
    +
    +            # Enable the Python 3 checker mode. This option is
    +            # passed down from the parent linter up to here, since
    +            # the Python 3 porting flag belongs to the Run class,
    +            # instead of the Linter class.
    +            if self._python3_porting_mode:
    +                linter.python3_porting_mode()
    +
    +            # Run the checks.
    +            linter.check(file_or_module)
    +
    +            msgs = [_get_new_args(m) for m in linter.reporter.messages]
    +            return (file_or_module, linter.file_state.base_name, linter.current_name,
    +                    msgs, linter.stats, linter.msg_status)
    +
    +
    +class PyLinter(configuration.OptionsManagerMixIn,
    +               utils.MessagesHandlerMixIn,
    +               utils.ReportsHandlerMixIn,
    +               checkers.BaseTokenChecker):
         """lint Python modules using external checkers.
     
         This is the main checker controlling the other ones and the reports
    @@ -168,13 +269,12 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
         to ensure the latest code version is actually checked.
         """
     
    -    __implements__ = (ITokenChecker,)
    +    __implements__ = (interfaces.ITokenChecker, )
     
         name = 'master'
         priority = 0
         level = 0
         msgs = MSGS
    -    may_be_disabled = False
     
         @staticmethod
         def make_options():
    @@ -182,7 +282,7 @@ def make_options():
                      {'type' : 'csv', 'metavar' : '[,...]',
                       'dest' : 'black_list', 'default' : ('CVS',),
                       'help' : 'Add files or directories to the blacklist. '
    -                  'They should be base names, not paths.'}),
    +                           'They should be base names, not paths.'}),
                     ('persistent',
                      {'default': True, 'type' : 'yn', 'metavar' : '',
                       'level': 1,
    @@ -192,88 +292,133 @@ def make_options():
                      {'type' : 'csv', 'metavar' : '', 'default' : (),
                       'level': 1,
                       'help' : 'List of plugins (as comma separated values of '
    -                  'python modules names) to load, usually to register '
    -                  'additional checkers.'}),
    +                           'python modules names) to load, usually to register '
    +                           'additional checkers.'}),
     
                     ('output-format',
                      {'default': 'text', 'type': 'string', 'metavar' : '',
                       'short': 'f',
                       'group': 'Reports',
                       'help' : 'Set the output format. Available formats are text,'
    -                  ' parseable, colorized, msvs (visual studio) and html. You '
    -                  'can also give a reporter class, eg mypackage.mymodule.'
    -                  'MyReporterClass.'}),
    +                           ' parseable, colorized, msvs (visual studio) and html. You '
    +                           'can also give a reporter class, eg mypackage.mymodule.'
    +                           'MyReporterClass.'}),
     
                     ('files-output',
                      {'default': 0, 'type' : 'yn', 'metavar' : '',
                       'group': 'Reports', 'level': 1,
                       'help' : 'Put messages in a separate file for each module / '
    -                  'package specified on the command line instead of printing '
    -                  'them on stdout. Reports (if any) will be written in a file '
    -                  'name "pylint_global.[txt|html]".'}),
    +                           'package specified on the command line instead of printing '
    +                           'them on stdout. Reports (if any) will be written in a file '
    +                           'name "pylint_global.[txt|html]".'}),
     
                     ('reports',
                      {'default': 1, 'type' : 'yn', 'metavar' : '',
                       'short': 'r',
                       'group': 'Reports',
                       'help' : 'Tells whether to display a full report or only the '
    -                  'messages'}),
    +                           'messages'}),
     
                     ('evaluation',
                      {'type' : 'string', 'metavar' : '',
                       'group': 'Reports', 'level': 1,
                       'default': '10.0 - ((float(5 * error + warning + refactor + '
    -                  'convention) / statement) * 10)',
    -                  'help' : 'Python expression which should return a note less \
    -than 10 (10 is the highest note). You have access to the variables errors \
    -warning, statement which respectively contain the number of errors / warnings\
    - messages and the total number of statements analyzed. This is used by the \
    - global evaluation report (RP0004).'}),
    +                             'convention) / statement) * 10)',
    +                  'help' : 'Python expression which should return a note less '
    +                           'than 10 (10 is the highest note). You have access '
    +                           'to the variables errors warning, statement which '
    +                           'respectively contain the number of errors / '
    +                           'warnings messages and the total number of '
    +                           'statements analyzed. This is used by the global '
    +                           'evaluation report (RP0004).'}),
     
                     ('comment',
                      {'default': 0, 'type' : 'yn', 'metavar' : '',
                       'group': 'Reports', 'level': 1,
                       'help' : 'Add a comment according to your evaluation note. '
    -                  'This is used by the global evaluation report (RP0004).'}),
    +                           'This is used by the global evaluation report (RP0004).'}),
    +
    +                ('confidence',
    +                 {'type' : 'multiple_choice', 'metavar': '',
    +                  'default': '',
    +                  'choices': [c.name for c in interfaces.CONFIDENCE_LEVELS],
    +                  'group': 'Messages control',
    +                  'help' : 'Only show warnings with the listed confidence levels.'
    +                           ' Leave empty to show all. Valid levels: %s' % (
    +                               ', '.join(c.name for c in interfaces.CONFIDENCE_LEVELS),)}),
     
                     ('enable',
                      {'type' : 'csv', 'metavar': '',
                       'short': 'e',
                       'group': 'Messages control',
                       'help' : 'Enable the message, report, category or checker with the '
    -                  'given id(s). You can either give multiple identifier '
    -                  'separated by comma (,) or put this option multiple time. '
    -                  'See also the "--disable" option for examples. '}),
    +                           'given id(s). You can either give multiple identifier '
    +                           'separated by comma (,) or put this option multiple time. '
    +                           'See also the "--disable" option for examples. '}),
     
                     ('disable',
                      {'type' : 'csv', 'metavar': '',
                       'short': 'd',
                       'group': 'Messages control',
                       'help' : 'Disable the message, report, category or checker '
    -                  'with the given id(s). You can either give multiple identifiers'
    -                  ' separated by comma (,) or put this option multiple times '
    -                  '(only on the command line, not in the configuration file '
    -                  'where it should appear only once).'
    -                  'You can also use "--disable=all" to disable everything first '
    -                  'and then reenable specific checks. For example, if you want '
    -                  'to run only the similarities checker, you can use '
    -                  '"--disable=all --enable=similarities". '
    -                  'If you want to run only the classes checker, but have no '
    -                  'Warning level messages displayed, use'
    -                  '"--disable=all --enable=classes --disable=W"'}),
    +                           'with the given id(s). You can either give multiple identifiers'
    +                           ' separated by comma (,) or put this option multiple times '
    +                           '(only on the command line, not in the configuration file '
    +                           'where it should appear only once).'
    +                           'You can also use "--disable=all" to disable everything first '
    +                           'and then reenable specific checks. For example, if you want '
    +                           'to run only the similarities checker, you can use '
    +                           '"--disable=all --enable=similarities". '
    +                           'If you want to run only the classes checker, but have no '
    +                           'Warning level messages displayed, use'
    +                           '"--disable=all --enable=classes --disable=W"'}),
     
                     ('msg-template',
                      {'type' : 'string', 'metavar': '