diff --git a/.travis.yml b/.travis.yml
index f98c4a7..239708f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,14 +1,25 @@
 language: python
 python:
-  - "2.6"
   - "2.7"
-#  - "3.2"
-#  - "3.3"
-#  - "3.4"
-#  - "3.5"
-#  - "3.5-dev" # 3.5 development branch
-#  - "nightly" # currently points to 3.6-dev
-# command to install dependencies
-#install: "pip install -r requirements.txt"
-# command to run tests
-script: python tests/run_tests.py
+  - "3.4"
+  - "3.5"
+  - "3.6"
+  - "3.7"
+  - "3.7-dev"
+  - "3.8-dev"
+
+os: linux
+arch:
+  - amd64
+  - arm64
+
+install:
+  - pip install coverage
+  - pip install pypack
+
+script:
+  # run tests with coverage
+  - coverage run tests/run_tests.py
+  - coverage report -m
+  # test distribution packaging
+  - python -m pypack patch.py
diff --git a/README.md b/README.md
index 0be6e6d..6cca3fd 100644
--- a/README.md
+++ b/README.md
@@ -1,24 +1,23 @@
 Library to parse and apply unified diffs.
 
-[![Build Status](https://img.shields.io/travis/techtonik/python-patch.svg)](https://travis-ci.org/techtonik/python-patch)
+[![Build Status](https://img.shields.io/travis/techtonik/python-patch/master)](https://travis-ci.org/techtonik/python-patch/branches)
 [![PyPI](https://img.shields.io/pypi/v/patch)](https://pypi.python.org/pypi/patch)
 
 ### Features
 
+ * Python 2 and 3 compatible
  * Automatic correction of
    * Linefeeds according to patched file
    * Diffs broken by stripping trailing whitespace
    * a/ and b/ prefixes
- * Single file, which is a command line tool and library
+ * Single file, which is a command line tool and a library
  * No dependencies outside Python stdlib
  * Patch format detection (SVN, HG, GIT)
  * Nice diffstat histogram
- * Linux / Windows / OX X
- * Python 2.5+ compatible, 2.6/2.7 tested
+ * Linux / Windows / OS X
  * Test coverage
 
 Things that don't work out of the box:
- * Python 3
  * File renaming, creation and removal
  * Directory tree operations
  * Version control specific properties
@@ -34,24 +33,22 @@ module without external dependencies.
 
 You can also run the .zip file.
 
-    python patch-1.15.zip diff.patch
+    python patch-1.16.zip diff.patch
 
 ### Installation
 
 **patch.py** is self sufficient. You can copy it into your repository and use it from here. This setup will always be repeatable.
 
 But if you need to add `patch` module as a dependency, make sure to use strict
-specifiers to avoid hitting an API break:
-
-    pip install "patch>=1,<2"
-
-With pip 6.x.x and later it is possible to use the alternative syntax:
+specifiers to avoid hitting an API break when version 2 is released:
 
     pip install "patch==1.*"
 
 ### Other stuff
+
 * [CHANGES](doc/CHANGES.md)
 * [LICENSE](doc/LICENSE)
 * [CREDITS](doc/CREDITS)
-*
+
+* [test coverage](http://techtonik.github.io/python-patch/tests/coverage/)
diff --git a/doc/ADOPTIONS.md b/doc/ADOPTIONS.md
index 255196e..bbc4ea0 100644
--- a/doc/ADOPTIONS.md
+++ b/doc/ADOPTIONS.md
@@ -1,5 +1,6 @@
 | Project | Description | patch.py version | Reviewed |
 |:--------|:------------|:-----------------|:---------|
+| [conda-recipes](https://github.com/conda/conda-recipes/tree/master/python-patch)| conda package | [1.12.11](https://github.com/conda/conda-recipes/blob/master/python-patch/patch.py) | 2016-01-17 |
 | [collective.recipe.patch](https://pypi.python.org/pypi/collective.recipe.patch/0.2.2) | buildout recipe for patching eggs | [8.06-1+](https://github.com/garbas/collective.recipe.patch/blob/master/collective/recipe/patch/patch.py) | 2014-01-17 |
 | [Linux Kernel Backports](https://backports.wiki.kernel.org/index.php/Documentation) | backporting Linux upstream device drivers for usage on older kernels | [1.12.12dev+](https://git.kernel.org/cgit/linux/kernel/git/backports/backports.git/tree/lib/patch.py) | 2014-01-17 |
 | [LuaPatch](http://lua-users.org/wiki/LuaPatch) | rewrite of patch.py for Lua by David Manura | 8.06-1| 2014-01-17 |
diff --git a/doc/CHANGES.md b/doc/CHANGES.md
index 766f119..418ed24 100644
--- a/doc/CHANGES.md
+++ b/doc/CHANGES.md
@@ -1,3 +1,8 @@
+##### 1.16
+
+ - Python 3 support, thanks to Yen Chi Hsuan (@yan12125)
+   (pull request #36)
+
 ##### 1.15
 
  - Project moved to GitHub
diff --git a/doc/CREDITS b/doc/CREDITS
index bf9b1a7..16cb9d7 100644
--- a/doc/CREDITS
+++ b/doc/CREDITS
@@ -7,3 +7,4 @@ Wladimir J. van der Laan (laanwj)
 azasypkin
 Philippe Ombredanne
 mspncp
+Yen Chi Hsuan (@yan12125)
diff --git a/doc/LICENSE b/doc/LICENSE
index 5747b33..e172f7a 100644
--- a/doc/LICENSE
+++ b/doc/LICENSE
@@ -1,7 +1,7 @@
 MIT License
 -----------
 
-Copyright (c) 2008-2015 anatoly techtonik
+Copyright (c) 2008-2016 anatoly techtonik
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/doc/RELEASING b/doc/RELEASING
index bafb65c..f367207 100644
--- a/doc/RELEASING
+++ b/doc/RELEASING
@@ -1,6 +1,7 @@
 * [ ] Pack .zip archive
 
-    python ./other/pack.py patch.py
+    pip install pypack
+    python -m pypack patch.py
 
 * [ ] Write changelog
 
@@ -11,3 +12,8 @@
 * [ ] Update PyPI description
 * [ ] Download PKG-INFO
 * [ ] Edit and upload
+
+* [ ] Tag release
+
+    git tag -a
+    git push --follow-tags
diff --git a/doc/evolution-notes.txt b/doc/evolution-notes.txt
index 784f5e6..7c544c2 100644
--- a/doc/evolution-notes.txt
+++ b/doc/evolution-notes.txt
@@ -13,6 +13,7 @@ patchset evolution
 2. copy file
 2. copy and rename
 2. move and rename
+2. remove file
 3. know file attributes
 3. know file mime-type
diff --git a/other/pack.mainpy.tpl b/other/pack.mainpy.tpl
deleted file mode 100644
index fa4b7a0..0000000
--- a/other/pack.mainpy.tpl
+++ /dev/null
@@ -1,4 +0,0 @@
-import sys
-
-import {{ module }}
-sys.exit({{ module }}.main())
diff --git a/other/pack.py b/other/pack.py
deleted file mode 100755
index 8ed523e..0000000
--- a/other/pack.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-"""
-Wrap Python module into executable .zip file
-
-Public domain work by:
-  anatoly techtonik
-"""
-import os
-import sys
-
-def get_version(path):
-  '''Read version info from a file without importing it'''
-  for line in open(path, 'rb'):
-    # Decode to unicode for PY2/PY3 in a fail-safe way
-    line = line.decode('cp437')
-    if '__version__' in line:
-      # __version__ = "0.9"
-      return line.split('"')[1]
-
-def zipadd(archive, filename, newname):
-  '''Add filename to archive. `newname` is required. Otherwise
-     zipfile may create unsafe entries, such as "../patch.py".
-     Returns open ZipFile object.
-  '''
-  import zipfile
-  zf = zipfile.ZipFile(archive, 'a', zipfile.ZIP_DEFLATED)
-  zf.write(filename, newname)
-  return zf
-
-class MiniJinja(object):
-  """Template engine that knows how to render {{ tag }}"""
-
-  def __init__(self, templates='.'):
-    """templates - template path"""
-    import re
-    import sys
-    self.PY3K = sys.version_info[0] == 3
-
-    self.path = templates + '/'
-    self.tag = re.compile('{{ *(?P<tag>\w+) *}}')
-
-  def render(self, template, vardict=None, **kwargs):
-    """returns unicode str"""
-    data = vardict or {}
-    data.update(kwargs)
-
-    def lookup(match):
-      return data[match.group('tag')]
-
-    tpl = open(self.path + template).read()
-    if not self.PY3K:
-      return unicode(self.tag.sub(lookup, tpl))
-    else:
-      return self.tag.sub(lookup, tpl)
-
-# ---
-
-BASE = os.path.abspath(os.path.dirname(__file__))
-
-if __name__ == '__main__':
-  if not sys.argv[1:]:
-    sys.exit("usage: pack.py ")
-
-  modpath = sys.argv[1]
-  modname = os.path.basename(modpath)[:-3]  # also strip extension
-  version = get_version(modpath)
-  if version == None:
-    sys.exit("error: no __version__ specifier found in %s" % modpath)
-  packname = modname + "-" + version + ".zip"
-  print("[*] Packing %s into %s" % (modpath, packname))
-  if os.path.exists(packname):
-    os.remove(packname)
-  zf = zipadd(packname, modpath, os.path.basename(modpath))
-  print("[*] Making %s executable" % (packname))
-  # http://techtonik.rainforce.org/2015/01/shipping-python-tools-in-executable-zip.html
-  text = MiniJinja(BASE).render('pack.mainpy.tpl', module=modname)
-  zf.writestr('__main__.py', text)
-  print("[*] Making %s installable" % (packname))
-  text2 = MiniJinja(BASE).render('pack.setuppy.tpl', module=modname, version=version)
-  zf.writestr('setup.py', text2)
-  print("[*] Making %s uploadable to PyPI" % (packname))
-  zf.writestr('PKG-INFO', '')
-  zf.close()
-
diff --git a/other/pack.setuppy.tpl b/other/pack.setuppy.tpl
deleted file mode 100644
index f3fbf6e..0000000
--- a/other/pack.setuppy.tpl
+++ /dev/null
@@ -1,17 +0,0 @@
-from distutils.core import setup
-
-setup(
-  name='{{ module }}',
-  version='{{ version }}',
-  author='anatoly techtonik <techtonik@gmail.com>',
-  url='https://github.com/techtonik/python-patch/',
-
-  description='Patch utility to apply unified diffs',
-  license='MIT',
-
-  py_modules=['{{ module }}'],
-
-  classifiers=[
-    'Classifier: Programming Language :: Python :: 2 :: Only',
-  ],
-)
diff --git a/patch.py b/patch.py
index b7ce194..4775d70 100755
--- a/patch.py
+++ b/patch.py
@@ -4,27 +4,56 @@
     Brute-force line-by-line non-recursive
parsing - Copyright (c) 2008-2015 anatoly techtonik + Copyright (c) 2008-2016 anatoly techtonik Available under the terms of MIT license - https://github.com/techtonik/python-patch/ - """ +from __future__ import print_function __author__ = "anatoly techtonik " -__version__ = "1.15" +__version__ = "1.16" +__license__ = "MIT" +__url__ = "https://github.com/techtonik/python-patch" import copy import logging import re + # cStringIO doesn't support unicode in 2.5 -from StringIO import StringIO -import urllib2 +try: + from StringIO import StringIO +except ImportError: + from io import BytesIO as StringIO # python 3 +try: + import urllib2 as urllib_request +except ImportError: + import urllib.request as urllib_request from os.path import exists, isfile, abspath import os import posixpath import shutil +import sys + + +PY3K = sys.version_info >= (3, 0) + +# PEP 3114 +if not PY3K: + compat_next = lambda gen: gen.next() +else: + compat_next = lambda gen: gen.__next__() + +def tostr(b): + """ Python 3 bytes encoder. Used to print filename in + diffstat output. Assumes that filenames are in utf-8. + """ + if not PY3K: + return b + + # [ ] figure out how to print non-utf-8 filenames without + # information loss + return b.decode('utf-8') #------------------------------------------------ @@ -96,18 +125,18 @@ def xisabs(filename): Returns True if `filename` is absolute on Linux, OS X or Windows. """ - if filename.startswith('/'): # Linux/Unix + if filename.startswith(b'/'): # Linux/Unix return True - elif filename.startswith('\\'): # Windows + elif filename.startswith(b'\\'): # Windows return True - elif re.match(r'\w:[\\/]', filename): # Windows + elif re.match(b'\\w:[\\\\/]', filename): # Windows return True return False def xnormpath(path): """ Cross-platform version of os.path.normpath """ # replace escapes and Windows slashes - normalized = posixpath.normpath(path).replace('\\', '/') + normalized = posixpath.normpath(path).replace(b'\\', b'/') # fold the result return posixpath.normpath(normalized) @@ -119,11 +148,11 @@ def xstrip(filename): """ while xisabs(filename): # strip windows drive with all slashes - if re.match(r'\w:[\\/]', filename): - filename = re.sub(r'^\w+:[\\/]+', '', filename) + if re.match(b'\\w:[\\\\/]', filename): + filename = re.sub(b'^\\w+:[\\\\/]+', b'', filename) # strip all slashes - elif re.match(r'[\\/]', filename): - filename = re.sub(r'^[\\/]+', '', filename) + elif re.match(b'[\\\\/]', filename): + filename = re.sub(b'^[\\\\/]+', b'', filename) return filename #----------------------------------------------- @@ -158,7 +187,7 @@ def fromurl(url): if an error occured. Note that this also can throw urlopen() exceptions. 
""" - ps = PatchSet( urllib2.urlopen(url) ) + ps = PatchSet( urllib_request.urlopen(url) ) if ps.errors == 0: return ps return False @@ -169,9 +198,9 @@ def fromurl(url): def pathstrip(path, n): """ Strip n leading components from the given path """ pathlist = [path] - while os.path.dirname(pathlist[0]) != '': + while os.path.dirname(pathlist[0]) != b'': pathlist[0:1] = os.path.split(pathlist[0]) - return '/'.join(pathlist[n:]) + return b'/'.join(pathlist[n:]) # --- /Utility function --- @@ -278,7 +307,7 @@ def next(self): return False try: - self._lineno, self._line = super(wrapumerate, self).next() + self._lineno, self._line = compat_next(super(wrapumerate, self)) except StopIteration: self._exhausted = True self._line = False @@ -308,7 +337,7 @@ def lineno(self): hunkparsed = False # state after successfully parsed hunk # regexp to match start of hunk, used groups - 1,3,4,6 - re_hunk_start = re.compile("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@") + re_hunk_start = re.compile(br"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@") self.errors = 0 # temp buffers for header and filenames info @@ -327,7 +356,7 @@ def lineno(self): hunkparsed = False if re_hunk_start.match(fe.line): hunkhead = True - elif fe.line.startswith("--- "): + elif fe.line.startswith(b"--- "): filenames = True else: headscan = True @@ -335,7 +364,7 @@ def lineno(self): # read out header if headscan: - while not fe.is_empty and not fe.line.startswith("--- "): + while not fe.is_empty and not fe.line.startswith(b"--- "): header.append(fe.line) fe.next() if fe.is_empty: @@ -343,7 +372,7 @@ def lineno(self): debug("no patch data found") # error is shown later self.errors += 1 else: - info("%d unparsed bytes left at the end of stream" % len(''.join(header))) + info("%d unparsed bytes left at the end of stream" % len(b''.join(header))) self.warnings += 1 # TODO check for \No new line at the end.. 
# TODO test for unparsed bytes @@ -364,26 +393,26 @@ def lineno(self): # [x] treat empty lines inside hunks as containing single space # (this happens when diff is saved by copy/pasting to editor # that strips trailing whitespace) - if line.strip("\r\n") == "": + if line.strip(b"\r\n") == b"": debug("expanding empty line in a middle of hunk body") self.warnings += 1 - line = ' ' + line + line = b' ' + line # process line first - if re.match(r"^[- \+\\]", line): + if re.match(b"^[- \\+\\\\]", line): # gather stats about line endings - if line.endswith("\r\n"): + if line.endswith(b"\r\n"): p.hunkends["crlf"] += 1 - elif line.endswith("\n"): + elif line.endswith(b"\n"): p.hunkends["lf"] += 1 - elif line.endswith("\r"): + elif line.endswith(b"\r"): p.hunkends["cr"] += 1 - if line.startswith("-"): + if line.startswith(b"-"): hunkactual["linessrc"] += 1 - elif line.startswith("+"): + elif line.startswith(b"+"): hunkactual["linestgt"] += 1 - elif not line.startswith("\\"): + elif not line.startswith(b"\\"): hunkactual["linessrc"] += 1 hunkactual["linestgt"] += 1 hunk.text.append(line) @@ -432,7 +461,7 @@ def lineno(self): # switch to hunkhead state hunkskip = False hunkhead = True - elif line.startswith("--- "): + elif line.startswith(b"--- "): # switch to filenames state hunkskip = False filenames = True @@ -440,7 +469,7 @@ def lineno(self): debug("- %2d hunks for %s" % (len(p.hunks), p.source)) if filenames: - if line.startswith("--- "): + if line.startswith(b"--- "): if srcname != None: # XXX testcase warning("skipping false patch for %s" % srcname) @@ -448,7 +477,7 @@ def lineno(self): # XXX header += srcname # double source filename line is encountered # attempt to restart from this second line - re_filename = "^--- ([^\t]+)" + re_filename = b"^--- ([^\t]+)" match = re.match(re_filename, line) # todo: support spaces in filenames if match: @@ -460,7 +489,7 @@ def lineno(self): # switch back to headscan state filenames = False headscan = True - elif not line.startswith("+++ "): + elif not line.startswith(b"+++ "): if srcname != None: warning("skipping invalid patch with no target for %s" % srcname) self.errors += 1 @@ -487,7 +516,7 @@ def lineno(self): filenames = False headscan = True else: - re_filename = "^\+\+\+ ([^\t]+)" + re_filename = br"^\+\+\+ ([^\t]+)" match = re.match(re_filename, line) if not match: warning("skipping invalid patch - no target filename at line %d" % (lineno+1)) @@ -513,7 +542,7 @@ def lineno(self): continue if hunkhead: - match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@(.*)", line) + match = re.match(br"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@(.*)", line) if not match: if not p.hunks: warning("skipping invalid patch with no hunks for file %s" % p.source) @@ -602,13 +631,13 @@ def _detect_type(self, p): # - next line is ===... 
delimiter # - filename is followed by revision number # TODO add SVN revision - if (len(p.header) > 1 and p.header[-2].startswith("Index: ") - and p.header[-1].startswith("="*67)): + if (len(p.header) > 1 and p.header[-2].startswith(b"Index: ") + and p.header[-1].startswith(b"="*67)): return SVN # common checks for both HG and GIT - DVCS = ((p.source.startswith('a/') or p.source == '/dev/null') - and (p.target.startswith('b/') or p.target == '/dev/null')) + DVCS = ((p.source.startswith(b'a/') or p.source == b'/dev/null') + and (p.target.startswith(b'b/') or p.target == b'/dev/null')) # GIT type check # - header[-2] is like "diff --git a/oldname b/newname" @@ -621,11 +650,11 @@ def _detect_type(self, p): if len(p.header) > 1: # detect the start of diff header - there might be some comments before for idx in reversed(range(len(p.header))): - if p.header[idx].startswith("diff --git"): + if p.header[idx].startswith(b"diff --git"): break - if p.header[idx].startswith('diff --git a/'): + if p.header[idx].startswith(b'diff --git a/'): if (idx+1 < len(p.header) - and re.match(r'index \w{7}..\w{7} \d{6}', p.header[idx+1])): + and re.match(b'index \\w{7}..\\w{7} \\d{6}', p.header[idx+1])): if DVCS: return GIT @@ -641,12 +670,12 @@ def _detect_type(self, p): # TODO add MQ # TODO add revision info if len(p.header) > 0: - if DVCS and re.match(r'diff -r \w{12} .*', p.header[-1]): + if DVCS and re.match(b'diff -r \\w{12} .*', p.header[-1]): return HG - if DVCS and p.header[-1].startswith('diff --git a/'): + if DVCS and p.header[-1].startswith(b'diff --git a/'): if len(p.header) == 1: # native Git patch header len is 2 return HG - elif p.header[0].startswith('# HG changeset patch'): + elif p.header[0].startswith(b'# HG changeset patch'): return HG return PLAIN @@ -674,12 +703,12 @@ def _normalize_filenames(self): # TODO: figure out how to deal with /dev/null entries debug("stripping a/ and b/ prefixes") if p.source != '/dev/null': - if not p.source.startswith("a/"): + if not p.source.startswith(b"a/"): warning("invalid source filename") else: p.source = p.source[2:] if p.target != '/dev/null': - if not p.target.startswith("b/"): + if not p.target.startswith(b"b/"): warning("invalid target filename") else: p.target = p.target[2:] @@ -687,18 +716,18 @@ def _normalize_filenames(self): p.source = xnormpath(p.source) p.target = xnormpath(p.target) - sep = '/' # sep value can be hardcoded, but it looks nice this way + sep = b'/' # sep value can be hardcoded, but it looks nice this way # references to parent are not allowed - if p.source.startswith(".." + sep): + if p.source.startswith(b".." + sep): warning("error: stripping parent path for source file patch no.%d" % (i+1)) self.warnings += 1 - while p.source.startswith(".." + sep): + while p.source.startswith(b".." + sep): p.source = p.source.partition(sep)[2] - if p.target.startswith(".." + sep): + if p.target.startswith(b".." + sep): warning("error: stripping parent path for target file patch no.%d" % (i+1)) self.warnings += 1 - while p.target.startswith(".." + sep): + while p.target.startswith(b".." 
+ sep): p.target = p.target.partition(sep)[2] # absolute paths are not allowed if xisabs(p.source) or xisabs(p.target): @@ -732,10 +761,10 @@ def diffstat(self): i,d = 0,0 for hunk in patch.hunks: for line in hunk.text: - if line.startswith('+'): + if line.startswith(b'+'): i += 1 delta += len(line)-1 - elif line.startswith('-'): + elif line.startswith(b'-'): d += 1 delta -= len(line)-1 names.append(patch.target) @@ -762,10 +791,10 @@ def diffstat(self): # make sure every entry gets at least one + or - iwidth = 1 if 0 < iratio < 1 else int(iratio) dwidth = 1 if 0 < dratio < 1 else int(dratio) - #print iratio, dratio, iwidth, dwidth, histwidth + #print(iratio, dratio, iwidth, dwidth, histwidth) hist = "+"*int(iwidth) + "-"*int(dwidth) # -- /calculating +- histogram -- - output += (format % (names[i], insert[i] + delete[i], hist)) + output += (format % (tostr(names[i]), str(insert[i] + delete[i]), hist)) output += (" %d files changed, %d insertions(+), %d deletions(-), %+d bytes" % (len(names), sum(insert), sum(delete), delta)) @@ -781,7 +810,7 @@ def findfile(self, old, new): else: # [w] Google Code generates broken patches with its online editor debug("broken patch from Google Code, stripping prefixes..") - if old.startswith('a/') and new.startswith('b/'): + if old.startswith(b'a/') and new.startswith(b'b/'): old, new = old[2:], new[2:] debug(" %s" % old) debug(" %s" % new) @@ -840,7 +869,7 @@ def apply(self, strip=0, root=None): debug("processing %d/%d:\t %s" % (i+1, total, filename)) # validate before patching - f2fp = open(filename) + f2fp = open(filename, 'rb') hunkno = 0 hunk = p.hunks[hunkno] hunkfind = [] @@ -851,8 +880,8 @@ def apply(self, strip=0, root=None): if lineno+1 < hunk.startsrc: continue elif lineno+1 == hunk.startsrc: - hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"] - hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"] + hunkfind = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" -"] + hunkreplace = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" +"] #pprint(hunkreplace) hunklineno = 0 @@ -860,13 +889,13 @@ def apply(self, strip=0, root=None): # check hunks in source file if lineno+1 < hunk.startsrc+len(hunkfind)-1: - if line.rstrip("\r\n") == hunkfind[hunklineno]: + if line.rstrip(b"\r\n") == hunkfind[hunklineno]: hunklineno+=1 else: info("file %d/%d:\t %s" % (i+1, total, filename)) info(" hunk no.%d doesn't match source file at line %d" % (hunkno+1, lineno+1)) info(" expected: %s" % hunkfind[hunklineno]) - info(" actual : %s" % line.rstrip("\r\n")) + info(" actual : %s" % line.rstrip(b"\r\n")) # not counting this as error, because file may already be patched. 
# check if file is already patched is done after the number of # invalid hunks if found @@ -908,7 +937,7 @@ def apply(self, strip=0, root=None): warning("source file is different - %s" % filename) errors += 1 if canpatch: - backupname = filename+".orig" + backupname = filename+b".orig" if exists(backupname): warning("can't backup original file to %s - aborting" % backupname) else: @@ -939,10 +968,12 @@ def _reverse(self): h.startsrc, h.starttgt = h.starttgt, h.startsrc h.linessrc, h.linestgt = h.linestgt, h.linessrc for i,line in enumerate(h.text): - if line[0] == '+': - h.text[i] = '-' + line[1:] - elif line[0] == '-': - h.text[i] = '+' +line[1:] + # need to use line[0:1] here, because line[0] + # returns int instead of bytes on Python 3 + if line[0:1] == b'+': + h.text[i] = b'-' + line[1:] + elif line[0:1] == b'-': + h.text[i] = b'+' +line[1:] def revert(self, strip=0, root=None): """ apply patch in reverse order """ @@ -967,7 +998,7 @@ def can_patch(self, filename): def _match_file_hunks(self, filepath, hunks): matched = True - fp = open(abspath(filepath)) + fp = open(abspath(filepath), 'rb') class NoMatch(Exception): pass @@ -985,13 +1016,13 @@ class NoMatch(Exception): line = fp.readline() lineno += 1 for hline in h.text: - if hline.startswith("-"): + if hline.startswith(b"-"): continue if not len(line): debug("check failed - premature eof on hunk: %d" % (hno+1)) # todo: \ No newline at the end of file raise NoMatch - if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"): + if line.rstrip(b"\r\n") != hline[1:].rstrip(b"\r\n"): debug("file is not patched - failed hunk: %d" % (hno+1)) raise NoMatch line = fp.readline() @@ -1020,7 +1051,7 @@ def patch_stream(self, instream, hunks): srclineno = 1 - lineends = {'\n':0, '\r\n':0, '\r':0} + lineends = {b'\n':0, b'\r\n':0, b'\r':0} def get_line(): """ local utility function - return line from source stream @@ -1028,12 +1059,12 @@ def get_line(): """ line = instream.readline() # 'U' mode works only with text files - if line.endswith("\r\n"): - lineends["\r\n"] += 1 - elif line.endswith("\n"): - lineends["\n"] += 1 - elif line.endswith("\r"): - lineends["\r"] += 1 + if line.endswith(b"\r\n"): + lineends[b"\r\n"] += 1 + elif line.endswith(b"\n"): + lineends[b"\n"] += 1 + elif line.endswith(b"\r"): + lineends[b"\r"] += 1 return line for hno, h in enumerate(hunks): @@ -1045,19 +1076,19 @@ def get_line(): for hline in h.text: # todo: check \ No newline at the end of file - if hline.startswith("-") or hline.startswith("\\"): + if hline.startswith(b"-") or hline.startswith(b"\\"): get_line() srclineno += 1 continue else: - if not hline.startswith("+"): + if not hline.startswith(b"+"): get_line() srclineno += 1 line2write = hline[1:] # detect if line ends are consistent in source file if sum([bool(lineends[x]) for x in lineends]) == 1: newline = [x for x in lineends if lineends[x] != 0][0] - yield line2write.rstrip("\r\n")+newline + yield line2write.rstrip(b"\r\n")+newline else: # newlines are mixed yield line2write @@ -1083,13 +1114,13 @@ def write_hunks(self, srcname, tgtname, hunks): def dump(self): for p in self.items: for headline in p.header: - print headline.rstrip('\n') - print '--- ' + p.source - print '+++ ' + p.target + print(headline.rstrip('\n')) + print('--- ' + p.source) + print('+++ ' + p.target) for h in p.hunks: - print '@@ -%s,%s +%s,%s @@' % (h.startsrc, h.linessrc, h.starttgt, h.linestgt) + print('@@ -%s,%s +%s,%s @@' % (h.startsrc, h.linessrc, h.starttgt, h.linestgt)) for line in h.text: - print line.rstrip('\n') + 
print(line.rstrip('\n'))
 
 
 def main():
@@ -1145,7 +1176,7 @@ def main():
   patch = fromfile(patchfile)
 
   if options.diffstat:
-    print patch.diffstat()
+    print(patch.diffstat())
     sys.exit(0)
 
   #pprint(patch)
diff --git a/tests/coverage/___py_mylib_python-patch_patch.html b/tests/coverage/___py_mylib_python-patch_patch.html
deleted file mode 100644
index 57bda2a..0000000
--- a/tests/coverage/___py_mylib_python-patch_patch.html
+++ /dev/null
@@ -1,2382 +0,0 @@
-Coverage for C:\__py\mylib\python-patch\patch: 77%
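The patch.py hunks above make the single-file module run on both Python 2 and 3 by moving the parser onto bytes (b'' literals, 'rb' file modes, the print() function). Below is a minimal usage sketch of the public entry points touched by this diff, namely fromfile(), fromstring(), PatchSet.diffstat() and PatchSet.apply(); the file name example.diff is hypothetical and only illustrates the call pattern:

    import patch

    # parse a patch file; returns a PatchSet object, or False on parse errors
    pset = patch.fromfile('example.diff')
    if pset:
      print(pset.diffstat())  # per-file insert/delete histogram, a printable str on 2.x and 3.x
      pset.apply(strip=0)     # apply hunks to the target files; strip=N drops N leading path components

    # StringIO is aliased to io.BytesIO on Python 3 in the hunks above,
    # so a literal diff goes in as bytes
    pset2 = patch.fromstring(b'--- a\n+++ b\n@@ -1 +1 @@\n-old\n+new\n')

The same calls work unchanged under Python 2.7 and 3.x, which is what the Travis matrix at the top of this diff exercises.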
- -

Hot-keys on this page

-
-

- r - m - x - p   toggle line displays -

-

- j - k   next/prev highlighted chunk -

-

- 0   (zero) top of page -

-

- 1   (one) first highlighted chunk -

-
-
- -
- - - - - -
-

1

-

2

-

3

-

4

-

5

-

6

-

7

-

8

-

9

-

10

-

11

-

12

-

13

-

14

-

15

-

16

-

17

-

18

-

19

-

20

-

21

-

22

-

23

-

24

-

25

-

26

-

27

-

28

-

29

-

30

-

31

-

32

-

33

-

34

-

35

-

36

-

37

-

38

-

39

-

40

-

41

-

42

-

43

-

44

-

45

-

46

-

47

-

48

-

49

-

50

-

51

-

52

-

53

-

54

-

55

-

56

-

57

-

58

-

59

-

60

-

61

-

62

-

63

-

64

-

65

-

66

-

67

-

68

-

69

-

70

-

71

-

72

-

73

-

74

-

75

-

76

-

77

-

78

-

79

-

80

-

81

-

82

-

83

-

84

-

85

-

86

-

87

-

88

-

89

-

90

-

91

-

92

-

93

-

94

-

95

-

96

-

97

-

98

-

99

-

100

-

101

-

102

-

103

-

104

-

105

-

106

-

107

-

108

-

109

-

110

-

111

-

112

-

113

-

114

-

115

-

116

-

117

-

118

-

119

-

120

-

121

-

122

-

123

-

124

-

125

-

126

-

127

-

128

-

129

-

130

-

131

-

132

-

133

-

134

-

135

-

136

-

137

-

138

-

139

-

140

-

141

-

142

-

143

-

144

-

145

-

146

-

147

-

148

-

149

-

150

-

151

-

152

-

153

-

154

-

155

-

156

-

157

-

158

-

159

-

160

-

161

-

162

-

163

-

164

-

165

-

166

-

167

-

168

-

169

-

170

-

171

-

172

-

173

-

174

-

175

-

176

-

177

-

178

-

179

-

180

-

181

-

182

-

183

-

184

-

185

-

186

-

187

-

188

-

189

-

190

-

191

-

192

-

193

-

194

-

195

-

196

-

197

-

198

-

199

-

200

-

201

-

202

-

203

-

204

-

205

-

206

-

207

-

208

-

209

-

210

-

211

-

212

-

213

-

214

-

215

-

216

-

217

-

218

-

219

-

220

-

221

-

222

-

223

-

224

-

225

-

226

-

227

-

228

-

229

-

230

-

231

-

232

-

233

-

234

-

235

-

236

-

237

-

238

-

239

-

240

-

241

-

242

-

243

-

244

-

245

-

246

-

247

-

248

-

249

-

250

-

251

-

252

-

253

-

254

-

255

-

256

-

257

-

258

-

259

-

260

-

261

-

262

-

263

-

264

-

265

-

266

-

267

-

268

-

269

-

270

-

271

-

272

-

273

-

274

-

275

-

276

-

277

-

278

-

279

-

280

-

281

-

282

-

283

-

284

-

285

-

286

-

287

-

288

-

289

-

290

-

291

-

292

-

293

-

294

-

295

-

296

-

297

-

298

-

299

-

300

-

301

-

302

-

303

-

304

-

305

-

306

-

307

-

308

-

309

-

310

-

311

-

312

-

313

-

314

-

315

-

316

-

317

-

318

-

319

-

320

-

321

-

322

-

323

-

324

-

325

-

326

-

327

-

328

-

329

-

330

-

331

-

332

-

333

-

334

-

335

-

336

-

337

-

338

-

339

-

340

-

341

-

342

-

343

-

344

-

345

-

346

-

347

-

348

-

349

-

350

-

351

-

352

-

353

-

354

-

355

-

356

-

357

-

358

-

359

-

360

-

361

-

362

-

363

-

364

-

365

-

366

-

367

-

368

-

369

-

370

-

371

-

372

-

373

-

374

-

375

-

376

-

377

-

378

-

379

-

380

-

381

-

382

-

383

-

384

-

385

-

386

-

387

-

388

-

389

-

390

-

391

-

392

-

393

-

394

-

395

-

396

-

397

-

398

-

399

-

400

-

401

-

402

-

403

-

404

-

405

-

406

-

407

-

408

-

409

-

410

-

411

-

412

-

413

-

414

-

415

-

416

-

417

-

418

-

419

-

420

-

421

-

422

-

423

-

424

-

425

-

426

-

427

-

428

-

429

-

430

-

431

-

432

-

433

-

434

-

435

-

436

-

437

-

438

-

439

-

440

-

441

-

442

-

443

-

444

-

445

-

446

-

447

-

448

-

449

-

450

-

451

-

452

-

453

-

454

-

455

-

456

-

457

-

458

-

459

-

460

-

461

-

462

-

463

-

464

-

465

-

466

-

467

-

468

-

469

-

470

-

471

-

472

-

473

-

474

-

475

-

476

-

477

-

478

-

479

-

480

-

481

-

482

-

483

-

484

-

485

-

486

-

487

-

488

-

489

-

490

-

491

-

492

-

493

-

494

-

495

-

496

-

497

-

498

-

499

-

500

-

501

-

502

-

503

-

504

-

505

-

506

-

507

-

508

-

509

-

510

-

511

-

512

-

513

-

514

-

515

-

516

-

517

-

518

-

519

-

520

-

521

-

522

-

523

-

524

-

525

-

526

-

527

-

528

-

529

-

530

-

531

-

532

-

533

-

534

-

535

-

536

-

537

-

538

-

539

-

540

-

541

-

542

-

543

-

544

-

545

-

546

-

547

-

548

-

549

-

550

-

551

-

552

-

553

-

554

-

555

-

556

-

557

-

558

-

559

-

560

-

561

-

562

-

563

-

564

-

565

-

566

-

567

-

568

-

569

-

570

-

571

-

572

-

573

-

574

-

575

-

576

-

577

-

578

-

579

-

580

-

581

-

582

-

583

-

584

-

585

-

586

-

587

-

588

-

589

-

590

-

591

-

592

-

593

-

594

-

595

-

596

-

597

-

598

-

599

-

600

-

601

-

602

-

603

-

604

-

605

-

606

-

607

-

608

-

609

-

610

-

611

-

612

-

613

-

614

-

615

-

616

-

617

-

618

-

619

-

620

-

621

-

622

-

623

-

624

-

625

-

626

-

627

-

628

-

629

-

630

-

631

-

632

-

633

-

634

-

635

-

636

-

637

-

638

-

639

-

640

-

641

-

642

-

643

-

644

-

645

-

646

-

647

-

648

-

649

-

650

-

651

-

652

-

653

-

654

-

655

-

656

-

657

-

658

-

659

-

660

-

661

-

662

-

663

-

664

-

665

-

666

-

667

-

668

-

669

-

670

-

671

-

672

-

673

-

674

-

675

-

676

-

677

-

678

-

679

-

680

-

681

-

682

-

683

-

684

-

685

-

686

-

687

-

688

-

689

-

690

-

691

-

692

-

693

-

694

-

695

-

696

-

697

-

698

-

699

-

700

-

701

-

702

-

703

-

704

-

705

-

706

-

707

-

708

-

709

-

710

-

711

-

712

-

713

-

714

-

715

-

716

-

717

-

718

-

719

-

720

-

721

-

722

-

723

-

724

-

725

-

726

-

727

-

728

-

729

-

730

-

731

-

732

-

733

-

734

-

735

-

736

-

737

-

738

-

739

-

740

-

741

-

742

-

743

-

744

-

745

-

746

-

747

-

748

-

749

-

750

-

751

-

752

-

753

-

754

-

755

-

756

-

757

-

758

-

759

-

760

-

761

-

762

-

763

-

764

-

765

-

766

-

767

-

768

-

769

-

770

-

771

-

772

-

773

-

774

-

775

-

776

-

777

-

778

-

779

-

780

-

781

-

782

-

783

-

784

-

785

-

786

-

787

-

788

-

789

-

790

-

791

-

792

-

793

-

794

-

795

-

796

-

797

-

798

-

799

-

800

-

801

-

802

-

803

-

804

-

805

-

806

-

807

-

808

-

809

-

810

-

811

-

812

-

813

-

814

-

815

-

816

-

817

-

818

-

819

-

820

-

821

-

822

-

823

-

824

-

825

-

826

-

827

-

828

-

829

-

830

-

831

-

832

-

833

-

834

-

835

-

836

-

837

-

838

-

839

-

840

-

841

-

842

-

843

-

844

-

845

-

846

-

847

-

848

-

849

-

850

-

851

-

852

-

853

-

854

-

855

-

856

-

857

-

858

-

859

-

860

-

861

-

862

-

863

-

864

-

865

-

866

-

867

-

868

-

869

-

870

-

871

-

872

-

873

-

874

-

875

-

876

-

877

-

878

-

879

-

880

-

881

-

882

-

883

-

884

-

885

-

886

-

887

-

888

-

889

-

890

-

891

-

892

-

893

-

894

-

895

-

896

-

897

-

898

-

899

-

900

-

901

-

902

-

903

-

904

-

905

-

906

-

907

-

908

-

909

-

910

-

911

-

912

-

913

-

914

-

915

-

916

-

917

-

918

-

919

-

920

-

921

-

922

-

923

-

924

-

925

-

926

-

927

-

928

-

929

-

930

-

931

-

932

-

933

-

934

-

935

-

936

-

937

-

938

-

939

-

940

-

941

-

942

-

943

-

944

-

945

-

946

-

947

-

948

-

949

-

950

-

951

-

952

-

953

-

954

-

955

-

956

-

957

-

958

-

959

-

960

-

961

-

962

-

963

-

964

-

965

-

966

-

967

-

968

-

969

-

970

-

971

-

972

-

973

-

974

-

975

-

976

-

977

-

978

-

979

-

980

-

981

-

982

-

983

-

984

-

985

-

986

-

987

-

988

-

989

-

990

-

991

-

992

-

993

-

994

-

995

-

996

-

997

-

998

-

999

-

1000

-

1001

-

1002

-

1003

-

1004

-

1005

-

1006

-

1007

-

1008

-

1009

-

1010

-

1011

-

1012

-

1013

-

1014

-

1015

-

1016

-

1017

-

1018

-

1019

-

1020

-

1021

-

1022

-

1023

-

1024

-

1025

-

1026

-

1027

-

1028

-

1029

-

1030

-

1031

-

1032

-

1033

-

1034

-

1035

-

1036

-

1037

-

1038

-

1039

-

1040

-

1041

-

1042

-

1043

-

1044

-

1045

-

1046

-

1047

-

1048

-

1049

-

1050

-

1051

-

1052

-

1053

-

1054

-

1055

-

1056

-

1057

-

1058

-

1059

-

1060

-

1061

-

1062

-

1063

-

1064

-

1065

-

1066

-

1067

-

1068

-

1069

-

1070

-

1071

-

1072

-

1073

-

1074

-

1075

-

1076

-

1077

-

1078

-

1079

-

1080

-

1081

-

1082

-

1083

-

1084

-

1085

-

1086

-

1087

-

1088

-

1089

-

1090

-

1091

-

1092

-

1093

-

1094

-

1095

-

1096

-

1097

-

1098

-

1099

-

1100

-

1101

-

1102

-

1103

-

1104

-

1105

-

1106

-

1107

-

1108

-

1109

-

1110

-

1111

-

1112

-

1113

-

1114

-

1115

-

1116

-

1117

-

1118

-

1119

-

1120

-

1121

-

1122

-

1123

-

1124

-

1125

-

1126

-

1127

-

1128

-

1129

-

1130

-

1131

-

1132

-

1133

-

1134

-

1135

-

1136

-

1137

-

1138

-

1139

-

1140

-

1141

-

1142

-

1143

-

1144

-

1145

-

1146

-

1147

-

1148

-

1149

-

1150

- -
-

#!/usr/bin/env python 

-

""" Patch utility to apply unified diffs 

-

 

-

    Brute-force line-by-line non-recursive parsing  

-

 

-

    Copyright (c) 2008-2014 anatoly techtonik 

-

    Available under the terms of MIT license 

-

 

-

    Project home: http://code.google.com/p/python-patch/ 

-

 

-

 

-

    $Id$ 

-

    $HeadURL$ 

-

""" 

-

 

-

__author__ = "anatoly techtonik <techtonik@gmail.com>" 

-

__version__ = "1.14dev" 

-

 

-

import copy 

-

import logging 

-

import re 

-

# cStringIO doesn't support unicode in 2.5 

-

from StringIO import StringIO 

-

import urllib2 

-

 

-

from os.path import exists, isfile, abspath 

-

import os 

-

import posixpath 

-

import shutil 

-

 

-

 

-

#------------------------------------------------ 

-

# Logging is controlled by logger named after the 

-

# module name (e.g. 'patch' for patch.py module) 

-

 

-

debugmode = False 

-

 

-

logger = logging.getLogger(__name__) 

-

 

-

debug = logger.debug 

-

info = logger.info 

-

warning = logger.warning 

-

 

-

class NullHandler(logging.Handler): 

-

  """ Copied from Python 2.7 to avoid getting 

-

      `No handlers could be found for logger "patch"` 

-

      http://bugs.python.org/issue16539 

-

  """ 

-

  def handle(self, record): 

-

    pass 

-

  def emit(self, record): 

-

    pass 

-

  def createLock(self): 

-

    self.lock = None 

-

 

-

logger.addHandler(NullHandler()) 

-

 

-

#------------------------------------------------ 

-

# Constants for Patch/PatchSet types 

-

 

-

DIFF = PLAIN = "plain" 

-

GIT = "git" 

-

HG = MERCURIAL = "mercurial" 

-

SVN = SUBVERSION = "svn" 

-

# mixed type is only actual when PatchSet contains 

-

# Patches of different type 

-

MIXED = MIXED = "mixed" 

-

 

-

 

-

#------------------------------------------------ 

-

# Helpers (these could come with Python stdlib) 

-

 

-

# x...() function are used to work with paths in 

-

# cross-platform manner - all paths use forward 

-

# slashes even on Windows. 

-

 

-

def xisabs(filename): 

-

  """ Cross-platform version of `os.path.isabs()` 

-

      Returns True if `filename` is absolute on 

-

      Linux, OS X or Windows. 

-

  """ 

-

  if filename.startswith('/'):     # Linux/Unix 

-

    return True 

-

  elif filename.startswith('\\'):  # Windows 

-

    return True 

-

  elif re.match(r'\w:[\\/]', filename): # Windows 

-

    return True 

-

  return False 

-

 

-

def xnormpath(path): 

-

  """ Cross-platform version of os.path.normpath """ 

-

  # replace escapes and Windows slashes 

-

  normalized = posixpath.normpath(path).replace('\\', '/') 

-

  # fold the result 

-

  return posixpath.normpath(normalized) 

-

 

-

def xstrip(filename): 

-

  """ Make relative path out of absolute by stripping 

-

      prefixes used on Linux, OS X and Windows. 

-

 

-

      This function is critical for security. 

-

  """ 

-

  while xisabs(filename): 

-

    # strip windows drive with all slashes 

-

    if re.match(r'\w:[\\/]', filename): 

-

      filename = re.sub(r'^\w+:[\\/]+', '', filename) 

-

    # strip all slashes 

-

    elif re.match(r'[\\/]', filename): 

-

      filename = re.sub(r'^[\\/]+', '', filename) 

-

  return filename 

-

 

-

#----------------------------------------------- 

-

# Main API functions 

-

 

-

def fromfile(filename): 

-

  """ Parse patch file. If successful, returns 

-

      PatchSet() object. Otherwise returns False. 

-

  """ 

-

  patchset = PatchSet() 

-

  debug("reading %s" % filename) 

-

  fp = open(filename, "rb") 

-

  res = patchset.parse(fp) 

-

  fp.close() 

-

  if res == True: 

-

    return patchset 

-

  return False 

-

 

-

 

-

def fromstring(s): 

-

  """ Parse text string and return PatchSet() 

-

      object (or False if parsing fails) 

-

  """ 

-

  ps = PatchSet( StringIO(s) ) 

-

  if ps.errors == 0: 

-

    return ps 

-

  return False 

-

 

-

 

-

def fromurl(url): 

-

  """ Parse patch from an URL, return False 

-

      if an error occured. Note that this also 

-

      can throw urlopen() exceptions. 

-

  """ 

-

  ps = PatchSet( urllib2.urlopen(url) ) 

-

  if ps.errors == 0: 

-

    return ps 

-

  return False 

-

 

-

 

-

# --- Utility functions --- 

-

# [ ] reuse more universal pathsplit() 

-

def pathstrip(path, n): 

-

  """ Strip n leading components from the given path """ 

-

  pathlist = [path] 

-

  while os.path.dirname(pathlist[0]) != '': 

-

    pathlist[0:1] = os.path.split(pathlist[0]) 

-

  return '/'.join(pathlist[n:]) 

-

# --- /Utility function --- 

-

 

-

 

-

class Hunk(object): 

-

  """ Parsed hunk data container (hunk starts with @@ -R +R @@) """ 

-

 

-

  def __init__(self): 

-

    self.startsrc=None #: line count starts with 1 

-

    self.linessrc=None 

-

    self.starttgt=None 

-

    self.linestgt=None 

-

    self.invalid=False 

-

    self.desc='' 

-

    self.text=[] 

-

 

-

#  def apply(self, estream): 

-

#    """ write hunk data into enumerable stream 

-

#        return strings one by one until hunk is 

-

#        over 

-

# 

-

#        enumerable stream are tuples (lineno, line) 

-

#        where lineno starts with 0 

-

#    """ 

-

#    pass 

-

 

-

 

-

class Patch(object): 

-

  """ Patch for a single file. 

-

      If used as an iterable, returns hunks. 

-

  """ 

-

  def __init__(self): 

-

    self.source = None 

-

    self.target = None 

-

    self.hunks = [] 

-

    self.hunkends = [] 

-

    self.header = [] 

-

 

-

    self.type = None 

-

 

-

  def __iter__(self): 

-

    for h in self.hunks: 

-

      yield h 

-

 

-

 

-

class PatchSet(object): 

-

  """ PatchSet is a patch parser and container. 

-

      When used as an iterable, returns patches. 

-

  """ 

-

 

-

  def __init__(self, stream=None): 

-

    # --- API accessible fields --- 

-

 

-

    # name of the PatchSet (filename or ...) 

-

    self.name = None 

-

    # patch set type - one of constants 

-

    self.type = None 

-

 

-

    # list of Patch objects 

-

    self.items = [] 

-

 

-

    self.errors = 0    # fatal parsing errors 

-

    self.warnings = 0  # non-critical warnings 

-

    # --- /API --- 

-

 

-

    if stream: 

-

      self.parse(stream) 

-

 

-

  def __len__(self): 

-

    return len(self.items) 

-

 

-

  def __iter__(self): 

-

    for i in self.items: 

-

      yield i 

-

 

-

  def parse(self, stream): 

-

    """ parse unified diff 

-

        return True on success 

-

    """ 

-

    lineends = dict(lf=0, crlf=0, cr=0) 

-

    nexthunkno = 0    #: even if index starts with 0 user messages number hunks from 1 

-

 

-

    p = None 

-

    hunk = None 

-

    # hunkactual variable is used to calculate hunk lines for comparison 

-

    hunkactual = dict(linessrc=None, linestgt=None) 

-

 

-

 

-

    class wrapumerate(enumerate): 

-

      """Enumerate wrapper that uses boolean end of stream status instead of 

-

      StopIteration exception, and properties to access line information. 

-

      """ 

-

 

-

      def __init__(self, *args, **kwargs): 

-

        # we don't call parent, it is magically created by __new__ method 

-

 

-

        self._exhausted = False 

-

        self._lineno = False     # after end of stream equal to the num of lines 

-

        self._line = False       # will be reset to False after end of stream 

-

 

-

      def next(self): 

-

        """Try to read the next line and return True if it is available, 

-

           False if end of stream is reached.""" 

-

        if self._exhausted: 

-

          return False 

-

 

-

        try: 

-

          self._lineno, self._line = super(wrapumerate, self).next() 

-

        except StopIteration: 

-

          self._exhausted = True 

-

          self._line = False 

-

          return False 

-

        return True 

-

 

-

      @property 

-

      def is_empty(self): 

-

        return self._exhausted 

-

 

-

      @property 

-

      def line(self): 

-

        return self._line 

-

 

-

      @property 

-

      def lineno(self): 

-

        return self._lineno 

-

 

-

    # define states (possible file regions) that direct parse flow 

-

    headscan  = True  # start with scanning header 

-

    filenames = False # lines starting with --- and +++ 

-

 

-

    hunkhead = False  # @@ -R +R @@ sequence 

-

    hunkbody = False  # 

-

    hunkskip = False  # skipping invalid hunk mode 

-

 

-

    hunkparsed = False # state after successfully parsed hunk 

-

 

-

    # regexp to match start of hunk, used groups - 1,3,4,6 

-

    re_hunk_start = re.compile("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@") 

-

 

-

    self.errors = 0 

-

    # temp buffers for header and filenames info 

-

    header = [] 

-

    srcname = None 

-

    tgtname = None 

-

 

-

    # start of main cycle 

-

    # each parsing block already has line available in fe.line 

-

    fe = wrapumerate(stream) 

-

    while fe.next(): 

-

 

-

      # -- deciders: these only switch state to decide who should process 

-

      # --           line fetched at the start of this cycle 

-

      if hunkparsed: 

-

        hunkparsed = False 

-

        if re_hunk_start.match(fe.line): 

-

            hunkhead = True 

-

        elif fe.line.startswith("--- "): 

-

            filenames = True 

-

        else: 

-

            headscan = True 

-

      # -- ------------------------------------ 

-

 

-

      # read out header 

-

      if headscan: 

-

        while not fe.is_empty and not fe.line.startswith("--- "): 

-

            header.append(fe.line) 

-

            fe.next() 

-

        if fe.is_empty: 

-

            if p == None: 

-

              debug("no patch data found")  # error is shown later 

-

              self.errors += 1 

-

            else: 

-

              info("%d unparsed bytes left at the end of stream" % len(''.join(header))) 

-

              self.warnings += 1 

-

              # TODO check for \No new line at the end..  

-

              # TODO test for unparsed bytes 

-

              # otherwise error += 1 

-

            # this is actually a loop exit 

-

            continue 

-

 

-

        headscan = False 

-

        # switch to filenames state 

-

        filenames = True 

-

 

-

      line = fe.line 

-

      lineno = fe.lineno 

-

 

-

 

-

      # hunkskip and hunkbody code skipped until definition of hunkhead is parsed 

-

      if hunkbody: 

-

        # [x] treat empty lines inside hunks as containing single space 

-

        #     (this happens when diff is saved by copy/pasting to editor 

-

        #      that strips trailing whitespace) 

-

        if line.strip("\r\n") == "": 

-

            debug("expanding empty line in a middle of hunk body") 

-

            self.warnings += 1 

-

            line = ' ' + line 

-

 

-

        # process line first 

-

        if re.match(r"^[- \+\\]", line): 

-

            # gather stats about line endings 

-

            if line.endswith("\r\n"): 

-

              p.hunkends["crlf"] += 1 

-

            elif line.endswith("\n"): 

-

              p.hunkends["lf"] += 1 

-

            elif line.endswith("\r"): 

-

              p.hunkends["cr"] += 1 

-

 

-

            if line.startswith("-"): 

-

              hunkactual["linessrc"] += 1 

-

            elif line.startswith("+"): 

-

              hunkactual["linestgt"] += 1 

-

            elif not line.startswith("\\"): 

-

              hunkactual["linessrc"] += 1 

-

              hunkactual["linestgt"] += 1 

-

            hunk.text.append(line) 

-

            # todo: handle \ No newline cases 

-

        else: 

-

            warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, p.target)) 

-

            # add hunk status node 

-

            hunk.invalid = True 

-

            p.hunks.append(hunk) 

-

            self.errors += 1 

-

            # switch to hunkskip state 

-

            hunkbody = False 

-

            hunkskip = True 

-

 

-

        # check exit conditions 

-

        if hunkactual["linessrc"] > hunk.linessrc or hunkactual["linestgt"] > hunk.linestgt: 

-

            warning("extra lines for hunk no.%d at %d for target %s" % (nexthunkno, lineno+1, p.target)) 

-

            # add hunk status node 

-

            hunk.invalid = True 

-

            p.hunks.append(hunk) 

-

            self.errors += 1 

-

            # switch to hunkskip state 

-

            hunkbody = False 

-

            hunkskip = True 

-

        elif hunk.linessrc == hunkactual["linessrc"] and hunk.linestgt == hunkactual["linestgt"]: 

-

            # hunk parsed successfully 

-

            p.hunks.append(hunk) 

-

            # switch to hunkparsed state 

-

            hunkbody = False 

-

            hunkparsed = True 

-

 

-

            # detect mixed window/unix line ends 

-

            ends = p.hunkends 

-

            if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1: 

-

              warning("inconsistent line ends in patch hunks for %s" % p.source) 

-

              self.warnings += 1 

-

            if debugmode: 

-

              debuglines = dict(ends) 

-

              debuglines.update(file=p.target, hunk=nexthunkno) 

-

              debug("crlf: %(crlf)d  lf: %(lf)d  cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines) 

-

            # fetch next line 

-

            continue 

-

 

-

      if hunkskip: 

-

        if re_hunk_start.match(line): 

-

          # switch to hunkhead state 

-

          hunkskip = False 

-

          hunkhead = True 

-

        elif line.startswith("--- "): 

-

          # switch to filenames state 

-

          hunkskip = False 

-

          filenames = True 

-

          if debugmode and len(self.items) > 0: 

-

            debug("- %2d hunks for %s" % (len(p.hunks), p.source)) 

-

 

-

      if filenames: 

-

        if line.startswith("--- "): 

-

          if srcname != None: 

-

            # XXX testcase 

-

            warning("skipping false patch for %s" % srcname) 

-

            srcname = None 

-

            # XXX header += srcname 

-

            # double source filename line is encountered 

-

            # attempt to restart from this second line 

-

          re_filename = "^--- ([^\t]+)" 

-

          match = re.match(re_filename, line) 

-

          # todo: support spaces in filenames 

-

          if match: 

-

            srcname = match.group(1).strip() 

-

          else: 

-

            warning("skipping invalid filename at line %d" % lineno) 

-

            self.errors += 1 

-

            # XXX p.header += line 

-

            # switch back to headscan state 

-

            filenames = False 

-

            headscan = True 

-

        elif not line.startswith("+++ "): 

-

          if srcname != None: 

-

            warning("skipping invalid patch with no target for %s" % srcname) 

-

            self.errors += 1 

-

            srcname = None 

-

            # XXX header += srcname 

-

            # XXX header += line 

-

          else: 

-

            # this should be unreachable 

-

            warning("skipping invalid target patch") 

-

          filenames = False 

-

          headscan = True 

-

        else: 

-

          if tgtname != None: 

-

            # XXX seems to be a dead branch   

-

            warning("skipping invalid patch - double target at line %d" % lineno) 

-

            self.errors += 1 

-

            srcname = None 

-

            tgtname = None 

-

            # XXX header += srcname 

-

            # XXX header += tgtname 

-

            # XXX header += line 

-

            # double target filename line is encountered 

-

            # switch back to headscan state 

-

            filenames = False 

-

            headscan = True 

-

          else: 

-

            re_filename = "^\+\+\+ ([^\t]+)" 

-

            match = re.match(re_filename, line) 

-

            if not match: 

-

              warning("skipping invalid patch - no target filename at line %d" % lineno) 

-

              self.errors += 1 

-

              srcname = None 

-

              # switch back to headscan state 

-

              filenames = False 

-

              headscan = True 

-

            else: 

-

              if p: # for the first run p is None 

-

                self.items.append(p) 

-

              p = Patch() 

-

              p.source = srcname 

-

              srcname = None 

-

              p.target = match.group(1).strip() 

-

              p.header = header 

-

              header = [] 

-

              # switch to hunkhead state 

-

              filenames = False 

-

              hunkhead = True 

-

              nexthunkno = 0 

-

              p.hunkends = lineends.copy() 

-

              continue 

-

 

-

      if hunkhead: 

-

        match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@(.*)", line) 

-

        if not match: 

-

          if not p.hunks: 

-

            warning("skipping invalid patch with no hunks for file %s" % p.source) 

-

            self.errors += 1 

-

            # XXX review switch 

-

            # switch to headscan state 

-

            hunkhead = False 

-

            headscan = True 

-

            continue 

-

          else: 

-

            # TODO review condition case 

-

            # switch to headscan state 

-

            hunkhead = False 

-

            headscan = True 

-

        else: 

-

          hunk = Hunk() 

-

          hunk.startsrc = int(match.group(1)) 

-

          hunk.linessrc = 1 

-

          if match.group(3): hunk.linessrc = int(match.group(3)) 

-

          hunk.starttgt = int(match.group(4)) 

-

          hunk.linestgt = 1 

-

          if match.group(6): hunk.linestgt = int(match.group(6)) 

-

          hunk.invalid = False 

-

          hunk.desc = match.group(7)[1:].rstrip() 

-

          hunk.text = [] 

-

 

-

          hunkactual["linessrc"] = hunkactual["linestgt"] = 0 

-

 

-

          # switch to hunkbody state 

-

          hunkhead = False 

-

          hunkbody = True 

-

          nexthunkno += 1 

-

          continue 

-

 

-

    # /while fe.next() 

-

 

-

    if p: 

-

      self.items.append(p) 

-

 

-

    if not hunkparsed: 

-

      if hunkskip: 

-

        warning("warning: finished with errors, some hunks may be invalid") 

-

      elif headscan: 

-

        if len(self.items) == 0: 

-

          warning("error: no patch data found!") 

-

          return False 

-

        else: # extra data at the end of file 

-

          pass 

-

      else: 

-

        warning("error: patch stream is incomplete!") 

-

        self.errors += 1 

-

        if len(self.items) == 0: 

-

          return False 

-

 

-

    if debugmode and len(self.items) > 0: 

-

        debug("- %2d hunks for %s" % (len(p.hunks), p.source)) 

-

 

-

    # XXX fix total hunks calculation 

-

    debug("total files: %d  total hunks: %d" % (len(self.items), 

-

        sum(len(p.hunks) for p in self.items))) 

-

 

-

    # ---- detect patch and patchset types ---- 

-

    for idx, p in enumerate(self.items): 

-

      self.items[idx].type = self._detect_type(p) 

-

 

-

    types = set([p.type for p in self.items]) 

-

    if len(types) > 1: 

-

      self.type = MIXED 

-

    else: 

-

      self.type = types.pop() 

-

    # -------- 

-

 

-

    self._normalize_filenames() 

-

 

-

    return (self.errors == 0) 

-

 

-

  def _detect_type(self, p):
    """ detect and return type for the specified Patch object
        analyzes header and filenames info

        NOTE: must be run before filenames are normalized
    """

    # check for SVN
    #  - header starts with Index:
    #  - next line is ===... delimiter
    #  - filename is followed by revision number
    # TODO add SVN revision
    if (len(p.header) > 1 and p.header[-2].startswith("Index: ")
          and p.header[-1].startswith("="*67)):
        return SVN

    # common checks for both HG and GIT
    DVCS = ((p.source.startswith('a/') or p.source == '/dev/null')
        and (p.target.startswith('b/') or p.target == '/dev/null'))

    # GIT type check
    #  - header[-2] is like "diff --git a/oldname b/newname"
    #  - header[-1] is like "index <hash>..<hash> <mode>"
    # TODO add git rename diffs and add/remove diffs
    #      add git diff with spaced filename
    # TODO http://www.kernel.org/pub/software/scm/git/docs/git-diff.html

    # detect the start of diff header - there might be some comments before
    if len(p.header) > 1:
      for idx in reversed(range(len(p.header))):
        if p.header[idx].startswith("diff --git"):
          break
      if re.match(r'diff --git a/[\w/.]+ b/[\w/.]+', p.header[idx]):
        if (idx+1 < len(p.header)
            and re.match(r'index \w{7}..\w{7} \d{6}', p.header[idx+1])):
          if DVCS:
            return GIT

    # HG check
    #
    #  - for plain HG format header is like "diff -r b2d9961ff1f5 filename"
    #  - for Git-style HG patches it is "diff --git a/oldname b/newname"
    #  - filename starts with a/, b/ or is equal to /dev/null
    #  - exported changesets also contain the header
    #    # HG changeset patch
    #    # User name@example.com
    #    ...
    # TODO add MQ
    # TODO add revision info
    if len(p.header) > 0:
      if DVCS and re.match(r'diff -r \w{12} .*', p.header[-1]):
        return HG
      if DVCS and p.header[-1].startswith('diff --git a/'):
        if len(p.header) == 1:  # native Git patch header len is 2
          return HG
        elif p.header[0].startswith('# HG changeset patch'):
          return HG

    return PLAIN

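  # Illustrative sketch, not part of the original file: the result of
  # _detect_type() ends up in item.type and PatchSet.type (see parse() above),
  # so callers can branch on the module constants ("fix.diff" is a placeholder):
  #
  #   ps = PatchSet(open("fix.diff"))
  #   for item in ps.items:
  #       if item.type in (GIT, HG):
  #           pass   # had a/ and b/ prefixes before normalization
  #   if ps.type == MIXED:
  #       pass       # more than one patch format in a single stream
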
  def _normalize_filenames(self):
    """ sanitize filenames, normalizing paths, i.e.:
        1. strip a/ and b/ prefixes from GIT and HG style patches
        2. remove all references to parent directories (with warning)
        3. translate any absolute paths to relative (with warning)

        [x] always use forward slashes to be crossplatform
            (diff/patch were born as a unix utility after all)

        return None
    """
    for i,p in enumerate(self.items):
      if p.type in (HG, GIT):
        # TODO: figure out how to deal with /dev/null entries
        debug("stripping a/ and b/ prefixes")
        if p.source != '/dev/null':
          if not p.source.startswith("a/"):
            warning("invalid source filename")
          else:
            p.source = p.source[2:]
        if p.target != '/dev/null':
          if not p.target.startswith("b/"):
            warning("invalid target filename")
          else:
            p.target = p.target[2:]

      p.source = xnormpath(p.source)
      p.target = xnormpath(p.target)

      sep = '/'  # sep value can be hardcoded, but it looks nice this way

      # references to parent are not allowed
      if p.source.startswith(".." + sep):
        warning("error: stripping parent path for source file patch no.%d" % (i+1))
        self.warnings += 1
        while p.source.startswith(".." + sep):
          p.source = p.source.partition(sep)[2]
      if p.target.startswith(".." + sep):
        warning("error: stripping parent path for target file patch no.%d" % (i+1))
        self.warnings += 1
        while p.target.startswith(".." + sep):
          p.target = p.target.partition(sep)[2]
      # absolute paths are not allowed
      if xisabs(p.source) or xisabs(p.target):
        warning("error: absolute paths are not allowed - file no.%d" % (i+1))
        self.warnings += 1
        if xisabs(p.source):
          warning("stripping absolute path from source name '%s'" % p.source)
          p.source = xstrip(p.source)
        if xisabs(p.target):
          warning("stripping absolute path from target name '%s'" % p.target)
          p.target = xstrip(p.target)

      self.items[i].source = p.source
      self.items[i].target = p.target

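  # Illustrative note, not part of the original file: after parse() has run
  # this method, item paths are relative, use forward slashes, and have the
  # a/ and b/ prefixes already removed for GIT/HG style patches; e.g. a
  # header line "--- a/src/module.py" yields item.source == "src/module.py"
  # (the path is a made-up example).
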
  def diffstat(self):
    """ calculate diffstat and return as a string
        Notes:
          - original diffstat outputs target filename
          - single + or - shouldn't escape histogram
    """
    names = []
    insert = []
    delete = []
    delta = 0    # size change in bytes
    namelen = 0
    maxdiff = 0  # max number of changes for single file
                 # (for histogram width calculation)
    for patch in self.items:
      i,d = 0,0
      for hunk in patch.hunks:
        for line in hunk.text:
          if line.startswith('+'):
            i += 1
            delta += len(line)-1
          elif line.startswith('-'):
            d += 1
            delta -= len(line)-1
      names.append(patch.target)
      insert.append(i)
      delete.append(d)
      namelen = max(namelen, len(patch.target))
      maxdiff = max(maxdiff, i+d)
    output = ''
    statlen = len(str(maxdiff))  # stats column width
    for i,n in enumerate(names):
      # %-19s | %-4d %s
      format = " %-" + str(namelen) + "s | %" + str(statlen) + "s %s\n"

      hist = ''
      # -- calculating histogram --
      width = len(format % ('', '', ''))
      histwidth = max(2, 80 - width)
      if maxdiff < histwidth:
        hist = "+"*insert[i] + "-"*delete[i]
      else:
        iratio = (float(insert[i]) / maxdiff) * histwidth
        dratio = (float(delete[i]) / maxdiff) * histwidth

        # make sure every entry gets at least one + or -
        iwidth = 1 if 0 < iratio < 1 else int(iratio)
        dwidth = 1 if 0 < dratio < 1 else int(dratio)
        #print iratio, dratio, iwidth, dwidth, histwidth
        hist = "+"*int(iwidth) + "-"*int(dwidth)
      # -- /calculating +- histogram --
      output += (format % (names[i], insert[i] + delete[i], hist))

    output += (" %d files changed, %d insertions(+), %d deletions(-), %+d bytes"
               % (len(names), sum(insert), sum(delete), delta))
    return output

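  # Illustrative sketch, not part of the original file: diffstat() only
  # formats hunks that are already parsed, so it can be printed before
  # apply(); the last line summarizes files changed, insertions, deletions
  # and the byte delta, mirroring the classic diffstat layout.
  #
  #   ps = PatchSet(open("fix.diff"))        # "fix.diff" is a placeholder
  #   print ps.diffstat()
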
  def findfile(self, old, new):
    """ return name of file to be patched or None """
    if exists(old):
      return old
    elif exists(new):
      return new
    else:
      # [w] Google Code generates broken patches with its online editor
      debug("broken patch from Google Code, stripping prefixes..")
      if old.startswith('a/') and new.startswith('b/'):
        old, new = old[2:], new[2:]
        debug("   %s" % old)
        debug("   %s" % new)
        if exists(old):
          return old
        elif exists(new):
          return new
      return None

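  # Illustrative sketch, not part of the original file: findfile() falls back
  # to stripping a/ and b/ prefixes, which is how prefix-less checkouts of
  # broken Google Code patches still resolve ("setup.py" is a made-up name):
  #
  #   name = ps.findfile("a/setup.py", "b/setup.py")
  #   # returns "setup.py" when that file exists in the current directory
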
  def apply(self, strip=0, root=None):
    """ Apply parsed patch, optionally stripping leading components
        from file paths. `root` parameter specifies working dir.
        return True on success
    """
    if root:
      prevdir = os.getcwd()
      os.chdir(root)

    total = len(self.items)
    errors = 0
    if strip:
      # [ ] test strip level exceeds nesting level
      #   [ ] test the same only for selected files
      #     [ ] test if files end up being on the same level
      try:
        strip = int(strip)
      except ValueError:
        errors += 1
        warning("error: strip parameter '%s' must be an integer" % strip)
        strip = 0

    #for fileno, filename in enumerate(self.source):
    for i,p in enumerate(self.items):
      if strip:
        debug("stripping %s leading component(s) from:" % strip)
        debug("   %s" % p.source)
        debug("   %s" % p.target)
        old = pathstrip(p.source, strip)
        new = pathstrip(p.target, strip)
      else:
        old, new = p.source, p.target

      filename = self.findfile(old, new)

      if not filename:
          warning("source/target file does not exist:\n  --- %s\n  +++ %s" % (old, new))
          errors += 1
          continue
      if not isfile(filename):
        warning("not a file - %s" % filename)
        errors += 1
        continue

      # [ ] check absolute paths security here
      debug("processing %d/%d:\t %s" % (i+1, total, filename))

      # validate before patching
      f2fp = open(filename)
      hunkno = 0
      hunk = p.hunks[hunkno]
      hunkfind = []
      hunkreplace = []
      validhunks = 0
      canpatch = False
      for lineno, line in enumerate(f2fp):
        if lineno+1 < hunk.startsrc:
          continue
        elif lineno+1 == hunk.startsrc:
          hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
          hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
          #pprint(hunkreplace)
          hunklineno = 0

          # todo \ No newline at end of file

        # check hunks in source file
        if lineno+1 < hunk.startsrc+len(hunkfind)-1:
          if line.rstrip("\r\n") == hunkfind[hunklineno]:
            hunklineno += 1
          else:
            info("file %d/%d:\t %s" % (i+1, total, filename))
            info(" hunk no.%d doesn't match source file at line %d" % (hunkno+1, lineno))
            info("  expected: %s" % hunkfind[hunklineno])
            info("  actual  : %s" % line.rstrip("\r\n"))
            # not counting this as error, because file may already be patched.
            # the check whether the file is already patched is done after the
            # number of invalid hunks is known
            # TODO: check hunks against source/target file in one pass
            #   API - check(stream, srchunks, tgthunks)
            #           return tuple (srcerrs, tgterrs)

            # continue to check other hunks for completeness
            hunkno += 1
            if hunkno < len(p.hunks):
              hunk = p.hunks[hunkno]
              continue
            else:
              break

        # check if processed line is the last line
        if lineno+1 == hunk.startsrc+len(hunkfind)-1:
          debug(" hunk no.%d for file %s  -- is ready to be patched" % (hunkno+1, filename))
          hunkno += 1
          validhunks += 1
          if hunkno < len(p.hunks):
            hunk = p.hunks[hunkno]
          else:
            if validhunks == len(p.hunks):
              # patch file
              canpatch = True
              break
      else:
        if hunkno < len(p.hunks):
          warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
          errors += 1

      f2fp.close()

      if validhunks < len(p.hunks):
        if self._match_file_hunks(filename, p.hunks):
          warning("already patched  %s" % filename)
        else:
          warning("source file is different - %s" % filename)
          errors += 1
      if canpatch:
        backupname = filename+".orig"
        if exists(backupname):
          warning("can't backup original file to %s - aborting" % backupname)
        else:
          import shutil
          shutil.move(filename, backupname)
          if self.write_hunks(backupname, filename, p.hunks):
            info("successfully patched %d/%d:\t %s" % (i+1, total, filename))
            os.unlink(backupname)
          else:
            errors += 1
            warning("error patching file %s" % filename)
            shutil.copy(filename, filename+".invalid")
            warning("invalid version is saved to %s" % filename+".invalid")
            # todo: proper rejects
            shutil.move(backupname, filename)

    if root:
      os.chdir(prevdir)

    # todo: check for premature eof
    return (errors == 0)

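  # Illustrative sketch, not part of the original file: a typical programmatic
  # apply, stripping one leading path component and working inside a chosen
  # root directory ("src" and "fix.diff" are placeholders):
  #
  #   ps = PatchSet(open("fix.diff"))
  #   if not ps.apply(strip=1, root="src"):
  #       print "some files could not be patched"
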
  def _reverse(self):
    """ reverse patch direction (this doesn't touch filenames) """
    for p in self.items:
      for h in p.hunks:
        h.startsrc, h.starttgt = h.starttgt, h.startsrc
        h.linessrc, h.linestgt = h.linestgt, h.linessrc
        for i,line in enumerate(h.text):
          if line[0] == '+':
            h.text[i] = '-' + line[1:]
          elif line[0] == '-':
            h.text[i] = '+' + line[1:]

  def revert(self, strip=0, root=None):
    """ apply patch in reverse order """
    reverted = copy.deepcopy(self)
    reverted._reverse()
    return reverted.apply(strip, root)

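  # Illustrative sketch, not part of the original file: revert() works on a
  # reversed deep copy, so the original PatchSet stays intact and the call
  # signature mirrors apply() (arguments are placeholders):
  #
  #   ps.apply(strip=1, root="src")          # patch
  #   ps.revert(strip=1, root="src")         # unpatch the same tree
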
  def can_patch(self, filename):
    """ Check if specified filename can be patched. Returns None if file can
    not be found among source filenames. False if patch can not be applied
    cleanly. True otherwise.

    :returns: True, False or None
    """
    filename = abspath(filename)
    for p in self.items:
      if filename == abspath(p.source):
        return self._match_file_hunks(filename, p.hunks)
    return None

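  # Illustrative sketch, not part of the original file ("src/module.py" is a
  # placeholder path):
  #
  #   state = ps.can_patch("src/module.py")
  #   # None  - the file is not named among the patch source filenames
  #   # False - the patch will not apply cleanly to it
  #   # True  - otherwise
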
  def _match_file_hunks(self, filepath, hunks):
    matched = True
    fp = open(abspath(filepath))

    class NoMatch(Exception):
      pass

    lineno = 1
    line = fp.readline()
    hno = None
    try:
      for hno, h in enumerate(hunks):
        # skip to first line of the hunk
        while lineno < h.starttgt:
          if not len(line): # eof
            debug("check failed - premature eof before hunk: %d" % (hno+1))
            raise NoMatch
          line = fp.readline()
          lineno += 1
        for hline in h.text:
          if hline.startswith("-"):
            continue
          if not len(line):
            debug("check failed - premature eof on hunk: %d" % (hno+1))
            # todo: \ No newline at the end of file
            raise NoMatch
          if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
            debug("file is not patched - failed hunk: %d" % (hno+1))
            raise NoMatch
          line = fp.readline()
          lineno += 1

    except NoMatch:
      matched = False
      # todo: display failed hunk, i.e. expected/found

    fp.close()
    return matched

  def patch_stream(self, instream, hunks):
    """ Generator that yields stream patched with hunks iterable

        Converts lineends in hunk lines to the best suitable format
        autodetected from input
    """

    # todo: At the moment substituted lineends may not be the same
    #       at the start and at the end of patching. Also issue a
    #       warning/throw about mixed lineends (is it really needed?)

    hunks = iter(hunks)

    srclineno = 1

    lineends = {'\n':0, '\r\n':0, '\r':0}
    def get_line():
      """
      local utility function - return line from source stream
      collecting line end statistics on the way
      """
      line = instream.readline()
      # 'U' mode works only with text files
      if line.endswith("\r\n"):
        lineends["\r\n"] += 1
      elif line.endswith("\n"):
        lineends["\n"] += 1
      elif line.endswith("\r"):
        lineends["\r"] += 1
      return line

    for hno, h in enumerate(hunks):
      debug("hunk %d" % (hno+1))
      # skip to line just before hunk starts
      while srclineno < h.startsrc:
        yield get_line()
        srclineno += 1

      for hline in h.text:
        # todo: check \ No newline at the end of file
        if hline.startswith("-") or hline.startswith("\\"):
          get_line()
          srclineno += 1
          continue
        else:
          if not hline.startswith("+"):
            get_line()
            srclineno += 1
          line2write = hline[1:]
          # detect if line ends are consistent in source file
          if sum([bool(lineends[x]) for x in lineends]) == 1:
            newline = [x for x in lineends if lineends[x] != 0][0]
            yield line2write.rstrip("\r\n")+newline
          else: # newlines are mixed
            yield line2write

    for line in instream:
      yield line

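  # Illustrative sketch, not part of the original file: because patch_stream()
  # is a generator, patched content can be written anywhere without touching
  # the source file (filenames are placeholders):
  #
  #   src = open("module.py", "rb")
  #   tgt = open("module.py.patched", "wb")
  #   tgt.writelines(ps.patch_stream(src, ps.items[0].hunks))
  #   tgt.close(); src.close()
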
  def write_hunks(self, srcname, tgtname, hunks):
    src = open(srcname, "rb")
    tgt = open(tgtname, "wb")

    debug("processing target file %s" % tgtname)

    tgt.writelines(self.patch_stream(src, hunks))

    tgt.close()
    src.close()
    # [ ] TODO: add test for permission copy
    shutil.copymode(srcname, tgtname)
    return True

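  # Illustrative note, not part of the original file: write_hunks() is the
  # helper apply() invokes after moving the original aside to "<name>.orig",
  # e.g. ps.write_hunks("module.py.orig", "module.py", ps.items[0].hunks)
  # (filenames are placeholders).
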
  def dump(self):
    for p in self.items:
      for headline in p.header:
        print headline.rstrip('\n')
      print '--- ' + p.source
      print '+++ ' + p.target
      for h in p.hunks:
        print '@@ -%s,%s +%s,%s @@' % (h.startsrc, h.linessrc, h.starttgt, h.linestgt)
        for line in h.text:
          print line.rstrip('\n')

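  # Illustrative sketch, not part of the original file: dump() re-emits the
  # parsed patch as a unified diff on stdout, which is handy for checking
  # what parse() actually captured ("fix.diff" is a placeholder):
  #
  #   ps = PatchSet(open("fix.diff"))
  #   ps.dump()
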
if __name__ == "__main__":
  from optparse import OptionParser
  from os.path import exists
  import sys

  opt = OptionParser(usage="1. %prog [options] unified.diff\n"
                    "       2. %prog [options] http://host/patch\n"
                    "       3. %prog [options] -- < unified.diff",
                     version="python-patch %s" % __version__)
  opt.add_option("-q", "--quiet", action="store_const", dest="verbosity",
                                  const=0, help="print only warnings and errors", default=1)
  opt.add_option("-v", "--verbose", action="store_const", dest="verbosity",
                                  const=2, help="be verbose")
  opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
  opt.add_option("--diffstat", action="store_true", dest="diffstat",
                                           help="print diffstat and exit")
  opt.add_option("-d", "--directory", metavar='DIR',
                                           help="specify root directory for applying patch")
  opt.add_option("-p", "--strip", type="int", metavar='N', default=0,
                                           help="strip N path components from filenames")
  opt.add_option("--revert", action="store_true",
                                           help="apply patch in reverse order (unpatch)")
  (options, args) = opt.parse_args()

  if not args and sys.argv[-1:] != ['--']:
    opt.print_version()
    opt.print_help()
    sys.exit()
  readstdin = (sys.argv[-1:] == ['--'] and not args)

  debugmode = options.debugmode

  verbosity_levels = {0:logging.WARNING, 1:logging.INFO, 2:logging.DEBUG}
  loglevel = verbosity_levels[options.verbosity]
  logformat = "%(message)s"
  if debugmode:
    loglevel = logging.DEBUG
    logformat = "%(levelname)8s %(message)s"
  logger.setLevel(loglevel)
  loghandler = logging.StreamHandler()
  loghandler.setFormatter(logging.Formatter(logformat))
  logger.addHandler(loghandler)

  if readstdin:
    patch = PatchSet(sys.stdin)
  else:
    patchfile = args[0]
    urltest = patchfile.split(':')[0]
    if (':' in patchfile and urltest.isalpha()
        and len(urltest) > 1): # one char before : is a windows drive letter
      patch = fromurl(patchfile)
    else:
      if not exists(patchfile) or not isfile(patchfile):
        sys.exit("patch file does not exist - %s" % patchfile)
      patch = fromfile(patchfile)

  if options.diffstat:
    print patch.diffstat()
    sys.exit(0)

  #pprint(patch)
  if options.revert:
    patch.revert(options.strip, root=options.directory) or sys.exit(-1)
  else:
    patch.apply(options.strip, root=options.directory) or sys.exit(-1)

  # todo: document and test line ends handling logic - patch.py detects proper line-endings
  #       for inserted hunks and issues a warning if patched file has inconsistent line ends

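# Illustrative command lines, not part of the original file, built from the
# options registered above ("fix.diff" and "src" are placeholders):
#
#   python patch.py fix.diff                # apply in the current directory
#   python patch.py --diffstat fix.diff     # print the histogram and exit
#   python patch.py -p 1 -d src fix.diff    # strip 1 component, work in src/
#   python patch.py --revert fix.diff       # undo a previously applied patch
#   python patch.py -- < fix.diff           # read the patch from stdin
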
# Legend:
# [ ]  - something to be done
# [w]  - official wart, external or internal that is unlikely to be fixed

# [ ] API break (2.x) wishlist
# PatchSet.items  -->  PatchSet.patches

# [ ] run --revert test for all dataset items
# [ ] run .parse() / .dump() test for dataset

diff --git a/tests/coverage/coverage_html.js b/tests/coverage/coverage_html.js
deleted file mode 100644
index b24006d..0000000
--- a/tests/coverage/coverage_html.js
+++ /dev/null
diff --git a/tests/coverage/index.html b/tests/coverage/index.html
deleted file mode 100644
index 7ad3057..0000000
--- a/tests/coverage/index.html
+++ /dev/null
diff --git a/tests/coverage/jquery.hotkeys.js b/tests/coverage/jquery.hotkeys.js
deleted file mode 100644
index 09b21e0..0000000
--- a/tests/coverage/jquery.hotkeys.js
+++ /dev/null
diff --git a/tests/coverage/jquery.isonscreen.js b/tests/coverage/jquery.isonscreen.js
deleted file mode 100644
index 0182ebd..0000000
--- a/tests/coverage/jquery.isonscreen.js
+++ /dev/null
diff --git a/tests/coverage/jquery.min.js b/tests/coverage/jquery.min.js
deleted file mode 100644
index c941a5f..0000000
--- a/tests/coverage/jquery.min.js
+++ /dev/null
";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length=== -0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(j,o,m){if(typeof o.getElementsByClassName!=="undefined"&&!m)return o.getElementsByClassName(j[1])};g=null}}})();l.contains=u.documentElement.contains?function(g,j){return g!==j&&(g.contains?g.contains(j):true)}:function(g,j){return!!(g.compareDocumentPosition(j)&16)};l.isXML=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false};var M=function(g, -j){for(var o=[],m="",p,q=j.nodeType?[j]:j;p=n.match.PSEUDO.exec(g);){m+=p[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;p=0;for(var t=q.length;p0)for(var h=d;h0},closest:function(a, -b){var d=[],e,f,h=this[0];if(c.isArray(a)){var k={},l,n=1;if(h&&a.length){e=0;for(f=a.length;e-1:c(h).is(e))d.push({selector:l,elem:h,level:n})}h=h.parentNode;n++}}return d}k=$a.test(a)?c(a,b||this.context):null;e=0;for(f=this.length;e-1:c.find.matchesSelector(h,a)){d.push(h);break}else{h=h.parentNode;if(!h|| -!h.ownerDocument||h===b)break}d=d.length>1?c.unique(d):d;return this.pushStack(d,"closest",a)},index:function(a){if(!a||typeof a==="string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var d=typeof a==="string"?c(a,b||this.context):c.makeArray(a),e=c.merge(this.get(),d);return this.pushStack(!d[0]||!d[0].parentNode||d[0].parentNode.nodeType===11||!e[0]||!e[0].parentNode||e[0].parentNode.nodeType===11?e:c.unique(e))},andSelf:function(){return this.add(this.prevObject)}}); -c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling", -d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,e){var f=c.map(this,b,d);Wa.test(a)||(e=d);if(e&&typeof e==="string")f=c.filter(e,f);f=this.length>1?c.unique(f):f;if((this.length>1||Ya.test(e))&&Xa.test(a))f=f.reverse();return this.pushStack(f,a,Za.call(arguments).join(","))}}); -c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return b.length===1?c.find.matchesSelector(b[0],a)?[b[0]]:[]:c.find.matches(a,b)},dir:function(a,b,d){var e=[];for(a=a[b];a&&a.nodeType!==9&&(d===A||a.nodeType!==1||!c(a).is(d));){a.nodeType===1&&e.push(a);a=a[b]}return e},nth:function(a,b,d){b=b||1;for(var e=0;a;a=a[d])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var xa=/ jQuery\d+="(?:\d+|null)"/g, -$=/^\s+/,ya=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,za=/<([\w:]+)/,ab=/\s]+\/)>/g,O={option:[1,""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"], -area:[1,"",""],_default:[0,"",""]};O.optgroup=O.option;O.tbody=O.tfoot=O.colgroup=O.caption=O.thead;O.th=O.td;if(!c.support.htmlSerialize)O._default=[1,"div
","
"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==A)return this.empty().append((this[0]&&this[0].ownerDocument||u).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this, -d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})}, -unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a= -c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,e;(e=this[d])!=null;d++)if(!a||c.filter(a,[e]).length){if(!b&&e.nodeType===1){c.cleanData(e.getElementsByTagName("*")); -c.cleanData([e])}e.parentNode&&e.parentNode.removeChild(e)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,e=this.ownerDocument;if(!d){d=e.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(xa,"").replace(cb,'="$1">').replace($, -"")],e)[0]}else return this.cloneNode(true)});if(a===true){la(this,b);la(this.find("*"),b.find("*"))}return b},html:function(a){if(a===A)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(xa,""):null;else if(typeof a==="string"&&!Aa.test(a)&&(c.support.leadingWhitespace||!$.test(a))&&!O[(za.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ya,"<$1>");try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?l.cloneNode(true):l)}k.length&&c.each(k,Ka)}return this}});c.buildFragment=function(a,b,d){var e,f,h;b=b&&b[0]?b[0].ownerDocument||b[0]:u;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===u&&!Aa.test(a[0])&&(c.support.checkClone|| -!Ba.test(a[0]))){f=true;if(h=c.fragments[a[0]])if(h!==1)e=h}if(!e){e=b.createDocumentFragment();c.clean(a,b,e,d)}if(f)c.fragments[a[0]]=h?e:1;return{fragment:e,cacheable:f}};c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var e=[];d=c(d);var 
f=this.length===1&&this[0].parentNode;if(f&&f.nodeType===11&&f.childNodes.length===1&&d.length===1){d[b](this[0]);return this}else{f=0;for(var h= -d.length;f0?this.clone(true):this).get();c(d[f])[b](k);e=e.concat(k)}return this.pushStack(e,a,d.selector)}}});c.extend({clean:function(a,b,d,e){b=b||u;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||u;for(var f=[],h=0,k;(k=a[h])!=null;h++){if(typeof k==="number")k+="";if(k){if(typeof k==="string"&&!bb.test(k))k=b.createTextNode(k);else if(typeof k==="string"){k=k.replace(ya,"<$1>");var l=(za.exec(k)||["",""])[1].toLowerCase(),n=O[l]||O._default, -s=n[0],v=b.createElement("div");for(v.innerHTML=n[1]+k+n[2];s--;)v=v.lastChild;if(!c.support.tbody){s=ab.test(k);l=l==="table"&&!s?v.firstChild&&v.firstChild.childNodes:n[1]===""&&!s?v.childNodes:[];for(n=l.length-1;n>=0;--n)c.nodeName(l[n],"tbody")&&!l[n].childNodes.length&&l[n].parentNode.removeChild(l[n])}!c.support.leadingWhitespace&&$.test(k)&&v.insertBefore(b.createTextNode($.exec(k)[0]),v.firstChild);k=v.childNodes}if(k.nodeType)f.push(k);else f=c.merge(f,k)}}if(d)for(h=0;f[h];h++)if(e&& -c.nodeName(f[h],"script")&&(!f[h].type||f[h].type.toLowerCase()==="text/javascript"))e.push(f[h].parentNode?f[h].parentNode.removeChild(f[h]):f[h]);else{f[h].nodeType===1&&f.splice.apply(f,[h+1,0].concat(c.makeArray(f[h].getElementsByTagName("script"))));d.appendChild(f[h])}return f},cleanData:function(a){for(var b,d,e=c.cache,f=c.event.special,h=c.support.deleteExpando,k=0,l;(l=a[k])!=null;k++)if(!(l.nodeName&&c.noData[l.nodeName.toLowerCase()]))if(d=l[c.expando]){if((b=e[d])&&b.events)for(var n in b.events)f[n]? -c.event.remove(l,n):c.removeEvent(l,n,b.handle);if(h)delete l[c.expando];else l.removeAttribute&&l.removeAttribute(c.expando);delete e[d]}}});var Ca=/alpha\([^)]*\)/i,db=/opacity=([^)]*)/,eb=/-([a-z])/ig,fb=/([A-Z])/g,Da=/^-?\d+(?:px)?$/i,gb=/^-?\d/,hb={position:"absolute",visibility:"hidden",display:"block"},La=["Left","Right"],Ma=["Top","Bottom"],W,ib=u.defaultView&&u.defaultView.getComputedStyle,jb=function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){if(arguments.length===2&&b===A)return this; -return c.access(this,a,b,true,function(d,e,f){return f!==A?c.style(d,e,f):c.css(d,e)})};c.extend({cssHooks:{opacity:{get:function(a,b){if(b){var d=W(a,"opacity","opacity");return d===""?"1":d}else return a.style.opacity}}},cssNumber:{zIndex:true,fontWeight:true,opacity:true,zoom:true,lineHeight:true},cssProps:{"float":c.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,d,e){if(!(!a||a.nodeType===3||a.nodeType===8||!a.style)){var f,h=c.camelCase(b),k=a.style,l=c.cssHooks[h];b=c.cssProps[h]|| -h;if(d!==A){if(!(typeof d==="number"&&isNaN(d)||d==null)){if(typeof d==="number"&&!c.cssNumber[h])d+="px";if(!l||!("set"in l)||(d=l.set(a,d))!==A)try{k[b]=d}catch(n){}}}else{if(l&&"get"in l&&(f=l.get(a,false,e))!==A)return f;return k[b]}}},css:function(a,b,d){var e,f=c.camelCase(b),h=c.cssHooks[f];b=c.cssProps[f]||f;if(h&&"get"in h&&(e=h.get(a,true,d))!==A)return e;else if(W)return W(a,b,f)},swap:function(a,b,d){var e={},f;for(f in b){e[f]=a.style[f];a.style[f]=b[f]}d.call(a);for(f in b)a.style[f]= -e[f]},camelCase:function(a){return a.replace(eb,jb)}});c.curCSS=c.css;c.each(["height","width"],function(a,b){c.cssHooks[b]={get:function(d,e,f){var h;if(e){if(d.offsetWidth!==0)h=ma(d,b,f);else c.swap(d,hb,function(){h=ma(d,b,f)});return h+"px"}},set:function(d,e){if(Da.test(e)){e=parseFloat(e);if(e>=0)return e+"px"}else return 
e}}});if(!c.support.opacity)c.cssHooks.opacity={get:function(a,b){return db.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"": -b?"1":""},set:function(a,b){var d=a.style;d.zoom=1;var e=c.isNaN(b)?"":"alpha(opacity="+b*100+")",f=d.filter||"";d.filter=Ca.test(f)?f.replace(Ca,e):d.filter+" "+e}};if(ib)W=function(a,b,d){var e;d=d.replace(fb,"-$1").toLowerCase();if(!(b=a.ownerDocument.defaultView))return A;if(b=b.getComputedStyle(a,null)){e=b.getPropertyValue(d);if(e===""&&!c.contains(a.ownerDocument.documentElement,a))e=c.style(a,d)}return e};else if(u.documentElement.currentStyle)W=function(a,b){var d,e,f=a.currentStyle&&a.currentStyle[b], -h=a.style;if(!Da.test(f)&&gb.test(f)){d=h.left;e=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;h.left=b==="fontSize"?"1em":f||0;f=h.pixelLeft+"px";h.left=d;a.runtimeStyle.left=e}return f};if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=a.offsetHeight;return a.offsetWidth===0&&b===0||!c.support.reliableHiddenOffsets&&(a.style.display||c.css(a,"display"))==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var kb=c.now(),lb=/)<[^<]*)*<\/script>/gi, -mb=/^(?:select|textarea)/i,nb=/^(?:color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ob=/^(?:GET|HEAD|DELETE)$/,Na=/\[\]$/,T=/\=\?(&|$)/,ia=/\?/,pb=/([?&])_=[^&]*/,qb=/^(\w+:)?\/\/([^\/?#]+)/,rb=/%20/g,sb=/#.*$/,Ea=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!=="string"&&Ea)return Ea.apply(this,arguments);else if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var f=a.slice(e,a.length);a=a.slice(0,e)}e="GET";if(b)if(c.isFunction(b)){d= -b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);e="POST"}var h=this;c.ajax({url:a,type:e,dataType:"html",data:b,complete:function(k,l){if(l==="success"||l==="notmodified")h.html(f?c("
").append(k.responseText.replace(lb,"")).find(f):k.responseText);d&&h.each(d,[k.responseText,l,k])}});return this},serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&& -!this.disabled&&(this.checked||mb.test(this.nodeName)||nb.test(this.type))}).map(function(a,b){var d=c(this).val();return d==null?null:c.isArray(d)?c.map(d,function(e){return{name:b.name,value:e}}):{name:b.name,value:d}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:e})}, -getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:e})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:function(){return new E.XMLHttpRequest},accepts:{xml:"application/xml, text/xml",html:"text/html", -script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},ajax:function(a){var b=c.extend(true,{},c.ajaxSettings,a),d,e,f,h=b.type.toUpperCase(),k=ob.test(h);b.url=b.url.replace(sb,"");b.context=a&&a.context!=null?a.context:b;if(b.data&&b.processData&&typeof b.data!=="string")b.data=c.param(b.data,b.traditional);if(b.dataType==="jsonp"){if(h==="GET")T.test(b.url)||(b.url+=(ia.test(b.url)?"&":"?")+(b.jsonp||"callback")+"=?");else if(!b.data|| -!T.test(b.data))b.data=(b.data?b.data+"&":"")+(b.jsonp||"callback")+"=?";b.dataType="json"}if(b.dataType==="json"&&(b.data&&T.test(b.data)||T.test(b.url))){d=b.jsonpCallback||"jsonp"+kb++;if(b.data)b.data=(b.data+"").replace(T,"="+d+"$1");b.url=b.url.replace(T,"="+d+"$1");b.dataType="script";var l=E[d];E[d]=function(m){f=m;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);if(c.isFunction(l))l(m);else{E[d]=A;try{delete E[d]}catch(p){}}v&&v.removeChild(B)}}if(b.dataType==="script"&&b.cache===null)b.cache= -false;if(b.cache===false&&h==="GET"){var n=c.now(),s=b.url.replace(pb,"$1_="+n);b.url=s+(s===b.url?(ia.test(b.url)?"&":"?")+"_="+n:"")}if(b.data&&h==="GET")b.url+=(ia.test(b.url)?"&":"?")+b.data;b.global&&c.active++===0&&c.event.trigger("ajaxStart");n=(n=qb.exec(b.url))&&(n[1]&&n[1]!==location.protocol||n[2]!==location.host);if(b.dataType==="script"&&h==="GET"&&n){var v=u.getElementsByTagName("head")[0]||u.documentElement,B=u.createElement("script");if(b.scriptCharset)B.charset=b.scriptCharset;B.src= -b.url;if(!d){var D=false;B.onload=B.onreadystatechange=function(){if(!D&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){D=true;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);B.onload=B.onreadystatechange=null;v&&B.parentNode&&v.removeChild(B)}}}v.insertBefore(B,v.firstChild);return A}var H=false,w=b.xhr();if(w){b.username?w.open(h,b.url,b.async,b.username,b.password):w.open(h,b.url,b.async);try{if(b.data!=null&&!k||a&&a.contentType)w.setRequestHeader("Content-Type", 
-b.contentType);if(b.ifModified){c.lastModified[b.url]&&w.setRequestHeader("If-Modified-Since",c.lastModified[b.url]);c.etag[b.url]&&w.setRequestHeader("If-None-Match",c.etag[b.url])}n||w.setRequestHeader("X-Requested-With","XMLHttpRequest");w.setRequestHeader("Accept",b.dataType&&b.accepts[b.dataType]?b.accepts[b.dataType]+", */*; q=0.01":b.accepts._default)}catch(G){}if(b.beforeSend&&b.beforeSend.call(b.context,w,b)===false){b.global&&c.active--===1&&c.event.trigger("ajaxStop");w.abort();return false}b.global&& -c.triggerGlobal(b,"ajaxSend",[w,b]);var M=w.onreadystatechange=function(m){if(!w||w.readyState===0||m==="abort"){H||c.handleComplete(b,w,e,f);H=true;if(w)w.onreadystatechange=c.noop}else if(!H&&w&&(w.readyState===4||m==="timeout")){H=true;w.onreadystatechange=c.noop;e=m==="timeout"?"timeout":!c.httpSuccess(w)?"error":b.ifModified&&c.httpNotModified(w,b.url)?"notmodified":"success";var p;if(e==="success")try{f=c.httpData(w,b.dataType,b)}catch(q){e="parsererror";p=q}if(e==="success"||e==="notmodified")d|| -c.handleSuccess(b,w,e,f);else c.handleError(b,w,e,p);d||c.handleComplete(b,w,e,f);m==="timeout"&&w.abort();if(b.async)w=null}};try{var g=w.abort;w.abort=function(){w&&g.call&&g.call(w);M("abort")}}catch(j){}b.async&&b.timeout>0&&setTimeout(function(){w&&!H&&M("timeout")},b.timeout);try{w.send(k||b.data==null?null:b.data)}catch(o){c.handleError(b,w,null,o);c.handleComplete(b,w,e,f)}b.async||M();return w}},param:function(a,b){var d=[],e=function(h,k){k=c.isFunction(k)?k():k;d[d.length]=encodeURIComponent(h)+ -"="+encodeURIComponent(k)};if(b===A)b=c.ajaxSettings.traditional;if(c.isArray(a)||a.jquery)c.each(a,function(){e(this.name,this.value)});else for(var f in a)ca(f,a[f],b,e);return d.join("&").replace(rb,"+")}});c.extend({active:0,lastModified:{},etag:{},handleError:function(a,b,d,e){a.error&&a.error.call(a.context,b,d,e);a.global&&c.triggerGlobal(a,"ajaxError",[b,a,e])},handleSuccess:function(a,b,d,e){a.success&&a.success.call(a.context,e,d,b);a.global&&c.triggerGlobal(a,"ajaxSuccess",[b,a])},handleComplete:function(a, -b,d){a.complete&&a.complete.call(a.context,b,d);a.global&&c.triggerGlobal(a,"ajaxComplete",[b,a]);a.global&&c.active--===1&&c.event.trigger("ajaxStop")},triggerGlobal:function(a,b,d){(a.context&&a.context.url==null?c(a.context):c.event).trigger(b,d)},httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===1223}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),e=a.getResponseHeader("Etag"); -if(d)c.lastModified[b]=d;if(e)c.etag[b]=e;return a.status===304},httpData:function(a,b,d){var e=a.getResponseHeader("content-type")||"",f=b==="xml"||!b&&e.indexOf("xml")>=0;a=f?a.responseXML:a.responseText;f&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b==="json"||!b&&e.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&e.indexOf("javascript")>=0)c.globalEval(a);return a}});if(E.ActiveXObject)c.ajaxSettings.xhr= -function(){if(E.location.protocol!=="file:")try{return new E.XMLHttpRequest}catch(a){}try{return new E.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}};c.support.ajax=!!c.ajaxSettings.xhr();var 
da={},tb=/^(?:toggle|show|hide)$/,ub=/^([+\-]=)?([\d+.\-]+)(.*)$/,aa,na=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b,d){if(a||a===0)return this.animate(S("show",3),a,b,d);else{a= -0;for(b=this.length;a=0;e--)if(d[e].elem===this){b&&d[e](true);d.splice(e,1)}});b||this.dequeue();return this}});c.each({slideDown:S("show",1),slideUp:S("hide",1),slideToggle:S("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,e,f){return this.animate(b, -d,e,f)}});c.extend({speed:function(a,b,d){var e=a&&typeof a==="object"?c.extend({},a):{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};e.duration=c.fx.off?0:typeof e.duration==="number"?e.duration:e.duration in c.fx.speeds?c.fx.speeds[e.duration]:c.fx.speeds._default;e.old=e.complete;e.complete=function(){e.queue!==false&&c(this).dequeue();c.isFunction(e.old)&&e.old.call(this)};return e},easing:{linear:function(a,b,d,e){return d+e*a},swing:function(a,b,d,e){return(-Math.cos(a* -Math.PI)/2+0.5)*e+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||c.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];var a=parseFloat(c.css(this.elem,this.prop));return a&&a>-1E4?a:0},custom:function(a,b,d){function e(h){return f.step(h)} -this.startTime=c.now();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;this.pos=this.state=0;var f=this;a=c.fx;e.elem=this.elem;if(e()&&c.timers.push(e)&&!aa)aa=setInterval(a.tick,a.interval)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true; -this.custom(this.cur(),0)},step:function(a){var b=c.now(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var e in this.options.curAnim)if(this.options.curAnim[e]!==true)d=false;if(d){if(this.options.overflow!=null&&!c.support.shrinkWrapBlocks){var f=this.elem,h=this.options;c.each(["","X","Y"],function(l,n){f.style["overflow"+n]=h.overflow[l]})}this.options.hide&&c(this.elem).hide();if(this.options.hide|| -this.options.show)for(var k in this.options.curAnim)c.style(this.elem,k,this.options.orig[k]);this.options.complete.call(this.elem)}return false}else{a=b-this.startTime;this.state=a/this.options.duration;b=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||b](this.state,a,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a= -c.timers,b=0;b-1;e={};var s={};if(n)s=f.position();k=n?s.top:parseInt(k,10)||0;l=n?s.left:parseInt(l,10)||0;if(c.isFunction(b))b=b.call(a,d,h);if(b.top!=null)e.top=b.top-h.top+k;if(b.left!=null)e.left=b.left-h.left+l;"using"in b?b.using.call(a, -e):f.css(e)}};c.fn.extend({position:function(){if(!this[0])return null;var 
a=this[0],b=this.offsetParent(),d=this.offset(),e=Fa.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.css(a,"marginTop"))||0;d.left-=parseFloat(c.css(a,"marginLeft"))||0;e.top+=parseFloat(c.css(b[0],"borderTopWidth"))||0;e.left+=parseFloat(c.css(b[0],"borderLeftWidth"))||0;return{top:d.top-e.top,left:d.left-e.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||u.body;a&&!Fa.test(a.nodeName)&& -c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(e){var f=this[0],h;if(!f)return null;if(e!==A)return this.each(function(){if(h=ea(this))h.scrollTo(!a?e:c(h).scrollLeft(),a?e:c(h).scrollTop());else this[d]=e});else return(h=ea(f))?"pageXOffset"in h?h[a?"pageYOffset":"pageXOffset"]:c.support.boxModel&&h.document.documentElement[d]||h.document.body[d]:f[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase(); -c.fn["inner"+b]=function(){return this[0]?parseFloat(c.css(this[0],d,"padding")):null};c.fn["outer"+b]=function(e){return this[0]?parseFloat(c.css(this[0],d,e?"margin":"border")):null};c.fn[d]=function(e){var f=this[0];if(!f)return e==null?null:this;if(c.isFunction(e))return this.each(function(h){var k=c(this);k[d](e.call(this,h,k[d]()))});return c.isWindow(f)?f.document.compatMode==="CSS1Compat"&&f.document.documentElement["client"+b]||f.document.body["client"+b]:f.nodeType===9?Math.max(f.documentElement["client"+ -b],f.body["scroll"+b],f.documentElement["scroll"+b],f.body["offset"+b],f.documentElement["offset"+b]):e===A?parseFloat(c.css(f,d)):this.css(d,typeof e==="string"?e:e+"px")}})})(window); diff --git a/tests/coverage/jquery.tablesorter.min.js b/tests/coverage/jquery.tablesorter.min.js deleted file mode 100644 index 64c7007..0000000 --- a/tests/coverage/jquery.tablesorter.min.js +++ /dev/null @@ -1,2 +0,0 @@ - -(function($){$.extend({tablesorter:new function(){var parsers=[],widgets=[];this.defaults={cssHeader:"header",cssAsc:"headerSortUp",cssDesc:"headerSortDown",sortInitialOrder:"asc",sortMultiSortKey:"shiftKey",sortForce:null,sortAppend:null,textExtraction:"simple",parsers:{},widgets:[],widgetZebra:{css:["even","odd"]},headers:{},widthFixed:false,cancelSelection:true,sortList:[],headerList:[],dateFormat:"us",decimal:'.',debug:false};function benchmark(s,d){log(s+","+(new Date().getTime()-d.getTime())+"ms");}this.benchmark=benchmark;function log(s){if(typeof console!="undefined"&&typeof console.debug!="undefined"){console.log(s);}else{alert(s);}}function buildParserCache(table,$headers){if(table.config.debug){var parsersDebug="";}var rows=table.tBodies[0].rows;if(table.tBodies[0].rows[0]){var list=[],cells=rows[0].cells,l=cells.length;for(var i=0;i1){arr=arr.concat(checkCellColSpan(table,headerArr,row++));}else{if(table.tHead.length==1||(cell.rowSpan>1||!r[row+1])){arr.push(cell);}}}return arr;};function checkHeaderMetadata(cell){if(($.metadata)&&($(cell).metadata().sorter===false)){return true;};return false;}function checkHeaderOptions(table,i){if((table.config.headers[i])&&(table.config.headers[i].sorter===false)){return true;};return false;}function applyWidget(table){var c=table.config.widgets;var l=c.length;for(var i=0;i');$("tr:first td",table.tBodies[0]).each(function(){colgroup.append($('
').css('width',$(this).width()));});$(table).prepend(colgroup);};}function updateHeaderSortCount(table,sortList){var c=table.config,l=sortList.length;for(var i=0;ib)?1:0));};function sortTextDesc(a,b){return((ba)?1:0));};function sortNumeric(a,b){return a-b;};function sortNumericDesc(a,b){return b-a;};function getCachedSortType(parsers,i){return parsers[i].type;};this.construct=function(settings){return this.each(function(){if(!this.tHead||!this.tBodies)return;var $this,$document,$headers,cache,config,shiftDown=0,sortOrder;this.config={};config=$.extend(this.config,$.tablesorter.defaults,settings);$this=$(this);$headers=buildHeaders(this);this.config.parsers=buildParserCache(this,$headers);cache=buildCache(this);var sortCSS=[config.cssDesc,config.cssAsc];fixColumnWidth(this);$headers.click(function(e){$this.trigger("sortStart");var totalRows=($this[0].tBodies[0]&&$this[0].tBodies[0].rows.length)||0;if(!this.sortDisabled&&totalRows>0){var $cell=$(this);var i=this.column;this.order=this.count++%2;if(!e[config.sortMultiSortKey]){config.sortList=[];if(config.sortForce!=null){var a=config.sortForce;for(var j=0;j0){$this.trigger("sorton",[config.sortList]);}applyWidget(this);});};this.addParser=function(parser){var l=parsers.length,a=true;for(var i=0;i - - - - - - - Coverage for run_tests: 97% - - - - - - - - - - - - -
diff --git a/tests/coverage/status.dat b/tests/coverage/status.dat
deleted file mode 100644
index d24a17b..0000000
--- a/tests/coverage/status.dat
+++ /dev/null
@@ -1,101 +0,0 @@
[deleted coverage.py status pickle (coverage 3.7.1) omitted]
diff --git a/tests/coverage/style.css b/tests/coverage/style.css
deleted file mode 100644
index 811c640..0000000
--- a/tests/coverage/style.css
+++ /dev/null
@@ -1,300 +0,0 @@
[deleted coverage report stylesheet omitted]
diff --git a/tests/recoverage.bat b/tests/recoverage.bat
index 92a8c3b..2792a93 100644
--- a/tests/recoverage.bat
+++ b/tests/recoverage.bat
@@ -1,2 +1,4 @@
-py -m coverage.__main__ run run_tests.py
-py -m coverage.__main__ html -d coverage
+cd ..
+python -m coverage run tests/run_tests.py +python -m coverage html -d tests/coverage +python -m coverage report -m diff --git a/tests/run_tests.py b/tests/run_tests.py index 10de02e..876aeae 100755 --- a/tests/run_tests.py +++ b/tests/run_tests.py @@ -29,6 +29,7 @@ On Windows it may be more convenient instead of `coverage` call `python -m coverage.__main__` """ +from __future__ import print_function import os import sys @@ -39,6 +40,10 @@ from os import listdir from os.path import abspath, dirname, exists, join, isdir, isfile from tempfile import mkdtemp +try: + getcwdu = os.getcwdu +except AttributeError: + getcwdu = os.getcwd # python 3, where getcwd always returns a unicode object verbose = False if "-v" in sys.argv or "--verbose" in sys.argv: @@ -95,9 +100,9 @@ def _assert_dirs_equal(self, dir1, dir2, ignore=[]): continue e1path = join(dir1, e1) e2path = join(dir2, e1) - self.assert_(exists(e1path)) - self.assert_(exists(e2path), "%s does not exist" % e2path) - self.assert_(isdir(e1path) == isdir(e2path)) + self.assertTrue(exists(e1path)) + self.assertTrue(exists(e2path), "%s does not exist" % e2path) + self.assertTrue(isdir(e1path) == isdir(e2path)) if not isdir(e1path): self._assert_files_equal(e1path, e2path) else: @@ -144,11 +149,11 @@ def _run_test(self, testname): # 3. # test utility as a whole patch_tool = join(dirname(TESTS), "patch.py") - save_cwd = os.getcwdu() + save_cwd = getcwdu() os.chdir(tmpdir) if verbose: cmd = '%s %s "%s"' % (sys.executable, patch_tool, patch_file) - print "\n"+cmd + print("\n"+cmd) else: cmd = '%s %s -q "%s"' % (sys.executable, patch_tool, patch_file) ret = os.system(cmd) @@ -192,14 +197,14 @@ def create_closure(): test = create_closure() setattr(cls, methname, test) if verbose: - print "added test method %s to %s" % (methname, cls) + print("added test method %s to %s" % (methname, cls)) add_test_methods(TestPatchFiles) # ---------------------------------------------------------------------------- class TestCheckPatched(unittest.TestCase): def setUp(self): - self.save_cwd = os.getcwdu() + self.save_cwd = getcwdu() os.chdir(TESTS) def tearDown(self): @@ -208,21 +213,21 @@ def tearDown(self): def test_patched_multipatch(self): pto = patch.fromfile("01uni_multi/01uni_multi.patch") os.chdir(join(TESTS, "01uni_multi", "[result]")) - self.assert_(pto.can_patch("updatedlg.cpp")) + self.assertTrue(pto.can_patch(b"updatedlg.cpp")) def test_can_patch_single_source(self): pto2 = patch.fromfile("02uni_newline.patch") - self.assert_(pto2.can_patch("02uni_newline.from")) + self.assertTrue(pto2.can_patch(b"02uni_newline.from")) def test_can_patch_fails_on_target_file(self): pto3 = patch.fromfile("03trail_fname.patch") - self.assertEqual(None, pto3.can_patch("03trail_fname.to")) - self.assertEqual(None, pto3.can_patch("not_in_source.also")) + self.assertEqual(None, pto3.can_patch(b"03trail_fname.to")) + self.assertEqual(None, pto3.can_patch(b"not_in_source.also")) def test_multiline_false_on_other_file(self): pto = patch.fromfile("01uni_multi/01uni_multi.patch") os.chdir(join(TESTS, "01uni_multi")) - self.assertFalse(pto.can_patch("updatedlg.cpp")) + self.assertFalse(pto.can_patch(b"updatedlg.cpp")) def test_single_false_on_other_file(self): pto3 = patch.fromfile("03trail_fname.patch") @@ -257,18 +262,18 @@ def test_no_header_for_plain_diff_with_single_file(self): def test_header_for_second_file_in_svn_diff(self): pto = patch.fromfile(join(TESTS, "01uni_multi/01uni_multi.patch")) - self.assertEqual(pto.items[1].header[0], 'Index: updatedlg.h\r\n') - 
self.assert_(pto.items[1].header[1].startswith('=====')) + self.assertEqual(pto.items[1].header[0], b'Index: updatedlg.h\r\n') + self.assertTrue(pto.items[1].header[1].startswith(b'=====')) def test_hunk_desc(self): pto = patch.fromfile(testfile('git-changed-file.diff')) - self.assertEqual(pto.items[0].hunks[0].desc, 'class JSONPluginMgr(object):') + self.assertEqual(pto.items[0].hunks[0].desc, b'class JSONPluginMgr(object):') def test_autofixed_absolute_path(self): pto = patch.fromfile(join(TESTS, "data/autofix/absolute-path.diff")) self.assertEqual(pto.errors, 0) self.assertEqual(pto.warnings, 2) - self.assertEqual(pto.items[0].source, "winnt/tests/run_tests.py") + self.assertEqual(pto.items[0].source, b"winnt/tests/run_tests.py") def test_autofixed_parent_path(self): # [ ] exception vs return codes for error recovery @@ -277,7 +282,7 @@ def test_autofixed_parent_path(self): pto = patch.fromfile(join(TESTS, "data/autofix/parent-path.diff")) self.assertEqual(pto.errors, 0) self.assertEqual(pto.warnings, 2) - self.assertEqual(pto.items[0].source, "patch.py") + self.assertEqual(pto.items[0].source, b"patch.py") def test_autofixed_stripped_trailing_whitespace(self): pto = patch.fromfile(join(TESTS, "data/autofix/stripped-trailing-whitespace.diff")) @@ -285,19 +290,19 @@ def test_autofixed_stripped_trailing_whitespace(self): self.assertEqual(pto.warnings, 4) def test_fail_missing_hunk_line(self): - fp = open(join(TESTS, "data/failing/missing-hunk-line.diff")) + fp = open(join(TESTS, "data/failing/missing-hunk-line.diff"), 'rb') pto = patch.PatchSet() self.assertNotEqual(pto.parse(fp), True) fp.close() def test_fail_context_format(self): - fp = open(join(TESTS, "data/failing/context-format.diff")) + fp = open(join(TESTS, "data/failing/context-format.diff"), 'rb') res = patch.PatchSet().parse(fp) self.assertFalse(res) fp.close() def test_fail_not_a_patch(self): - fp = open(join(TESTS, "data/failing/not-a-patch.log")) + fp = open(join(TESTS, "data/failing/not-a-patch.log"), 'rb') res = patch.PatchSet().parse(fp) self.assertFalse(res) fp.close() @@ -345,12 +350,12 @@ def test(self): test = generate_detection_test(filename, difftype) setattr(TestPatchSetDetection, name, test) if verbose: - print "added test method %s to %s" % (name, 'TestPatchSetDetection') + print("added test method %s to %s" % (name, 'TestPatchSetDetection')) class TestPatchApply(unittest.TestCase): def setUp(self): - self.save_cwd = os.getcwdu() + self.save_cwd = getcwdu() self.tmpdir = mkdtemp(prefix=self.__class__.__name__) os.chdir(self.tmpdir) @@ -373,41 +378,45 @@ def test_apply_returns_true_on_success(self): self.tmpcopy(['03trail_fname.patch', '03trail_fname.from']) pto = patch.fromfile('03trail_fname.patch') - self.assert_(pto.apply()) + self.assertTrue(pto.apply()) def test_revert(self): + def get_file_content(filename): + with open(filename, 'rb') as f: + return f.read() + self.tmpcopy(['03trail_fname.patch', '03trail_fname.from']) pto = patch.fromfile('03trail_fname.patch') - self.assert_(pto.apply()) - self.assertNotEqual(open(self.tmpdir + '/03trail_fname.from').read(), - open(TESTS + '/03trail_fname.from').read()) - self.assert_(pto.revert()) - self.assertEqual(open(self.tmpdir + '/03trail_fname.from').read(), - open(TESTS + '/03trail_fname.from').read()) + self.assertTrue(pto.apply()) + self.assertNotEqual(get_file_content(self.tmpdir + '/03trail_fname.from'), + get_file_content(TESTS + '/03trail_fname.from')) + self.assertTrue(pto.revert()) + self.assertEqual(get_file_content(self.tmpdir + 
'/03trail_fname.from'), + get_file_content(TESTS + '/03trail_fname.from')) def test_apply_root(self): treeroot = join(self.tmpdir, 'rootparent') shutil.copytree(join(TESTS, '06nested'), treeroot) pto = patch.fromfile(join(TESTS, '06nested/06nested.patch')) - self.assert_(pto.apply(root=treeroot)) + self.assertTrue(pto.apply(root=treeroot)) def test_apply_strip(self): treeroot = join(self.tmpdir, 'rootparent') shutil.copytree(join(TESTS, '06nested'), treeroot) pto = patch.fromfile(join(TESTS, '06nested/06nested.patch')) for p in pto: - p.source = 'nasty/prefix/' + p.source - p.target = 'nasty/prefix/' + p.target - self.assert_(pto.apply(strip=2, root=treeroot)) + p.source = b'nasty/prefix/' + p.source + p.target = b'nasty/prefix/' + p.target + self.assertTrue(pto.apply(strip=2, root=treeroot)) class TestHelpers(unittest.TestCase): # unittest setting longMessage = True - absolute = ['/', 'c:\\', 'c:/', '\\', '/path', 'c:\\path'] - relative = ['path', 'path:\\', 'path:/', 'path\\', 'path/', 'path\\path'] + absolute = [b'/', b'c:\\', b'c:/', b'\\', b'/path', b'c:\\path'] + relative = [b'path', b'path:\\', b'path:/', b'path\\', b'path/', b'path\\path'] def test_xisabs(self): for path in self.absolute: @@ -416,22 +425,22 @@ def test_xisabs(self): self.assertFalse(patch.xisabs(path), 'Target path: ' + repr(path)) def test_xnormpath(self): - path = "../something/..\\..\\file.to.patch" - self.assertEqual(patch.xnormpath(path), '../../file.to.patch') + path = b"../something/..\\..\\file.to.patch" + self.assertEqual(patch.xnormpath(path), b'../../file.to.patch') def test_xstrip(self): for path in self.absolute[:4]: - self.assertEqual(patch.xstrip(path), '') + self.assertEqual(patch.xstrip(path), b'') for path in self.absolute[4:6]: - self.assertEqual(patch.xstrip(path), 'path') + self.assertEqual(patch.xstrip(path), b'path') # test relative paths are not affected for path in self.relative: self.assertEqual(patch.xstrip(path), path) def test_pathstrip(self): - self.assertEqual(patch.pathstrip('path/to/test/name.diff', 2), 'test/name.diff') - self.assertEqual(patch.pathstrip('path/name.diff', 1), 'name.diff') - self.assertEqual(patch.pathstrip('path/name.diff', 0), 'path/name.diff') + self.assertEqual(patch.pathstrip(b'path/to/test/name.diff', 2), b'test/name.diff') + self.assertEqual(patch.pathstrip(b'path/name.diff', 1), b'name.diff') + self.assertEqual(patch.pathstrip(b'path/name.diff', 0), b'path/name.diff') # ----------------------------------------------------------------------------
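Editor's note: the run_tests.py hunks above port the test suite to run on both Python 2 and 3 (print_function, a getcwdu fallback, binary-mode file reads, and bytes paths for the patch.py API). The sketch below is illustrative only and not part of the diff; it restates that compatibility pattern in a standalone script. Assumptions: patch.py 1.16+ where parsed source/target paths are bytes, and the file names `example.diff` and `patch.py` are placeholders.

    # Sketch only -- mirrors the getcwdu fallback and bytes-path usage from the
    # hunks above; the patch file name and target file are hypothetical.
    from __future__ import print_function

    import os
    import patch

    try:
        # Python 2: os.getcwdu() returns the working directory as unicode
        getcwdu = os.getcwdu
    except AttributeError:
        # Python 3: os.getcwd() already returns text
        getcwdu = os.getcwd

    pto = patch.fromfile("example.diff")    # PatchSet on success, False on parse error
    if pto and pto.can_patch(b"patch.py"):  # 1.16+ keeps parsed paths as bytes
        print("applying in", getcwdu())
        pto.apply()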