diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 239708f..0000000 --- a/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ -language: python -python: - - "2.7" - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.7-dev" - - "3.8-dev" - -os: linux -arch: - - amd64 - - arm64 - -install: - - pip install coverage - - pip install pypack - -script: - # run tests with coverage - - coverage run tests/run_tests.py - - coverage report -m - # test distribution packaging - - python -m pypack patch.py diff --git a/APIUseCases.md b/APIUseCases.md new file mode 100644 index 0000000..00ec0bd --- /dev/null +++ b/APIUseCases.md @@ -0,0 +1,55 @@ + +``` +import patch +``` + +### Detect if a file is a valid patch ### +``` +>>> bool( patch.fromfile('doc/example.diff.diff') ) +True +``` + +### Print diffstat ### +``` +>>> print(patch.fromfile('tests/01uni_multi.patch').diffstat()) + updatedlg.cpp | 20 ++++++++++++++++++-- + updatedlg.h | 1 + + manifest.xml | 15 ++++++++------- + conf.cpp | 23 +++++++++++++++++------ + conf.h | 7 ++++--- + 5 files changed, 48 insertions(+), 18 deletions(-), +1203 bytes +``` + +### Find all patch files and do something about them ### +``` +import patch, os + +matched = 0 +for file in os.listdir('.'): + if os.path.isdir(file): + continue + + with open(file, "rb") as fp: + ps = patch.PatchSet() + if not ps.parse(fp): + pass + else: + print(file) + for each in ps: + print(" " + each.target) + for h in each.hunks: + if h.desc: + print(" " + h.desc) + matched += 1 + +print('Found %s' % matched) +``` + +## User stories ## +### 01. Upgrade Trac environment files ### + +**Storyline:** I want to upgrade Trac environment while upgrading Trac itself from 0.9 to 0.11. This requires patching documentation files (README) in environment to a new version. It may not worth to distribute full version of documentation, so I want to detect what files are not patched, ensure that patch applies clearly before processing. + +**Proposed API:** + +**Example:** \ No newline at end of file diff --git a/DESIGN.md b/DESIGN.md new file mode 100644 index 0000000..9d2e8f5 --- /dev/null +++ b/DESIGN.md @@ -0,0 +1,231 @@ +#author Anatoly Techtonik + +The initial goal was to make cross-platform alternative to unix patch +utility. This utility should not require compiling, must be easy to +extend and should be run as standalone tool or as server process. That's +why Python was chosen. + +Another goal was to make a library for automation of patching tasks as +part of rainforce project to encourage development of tools that will +make "patchwork" more intuitive and easy. + +And the last goal was to reinvert line parser theory from scratch, because +you need to reinvent things to prove that there is no better way to do +something (or to understand what do you need to look for). + + +## Design decisions ## + +#### Patch target file if source is not found #### + +If the source file is not found while patching, **patch.py** tries to patch +target filename. This logic is used to process old style manual patches +that were made by comparing backup file (with some fancy .old extension) +with modified version that holds original name. + +#### "Already patched" detection #### + +_unified diff format_ doesn't allow to correctly validate if file is +already patched, but only to check if it could be patched. It is because +in some rare cases it is possible to apply the same patch several times +(see 04check\_patched example in tests). 
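For illustration, this is roughly how the distinction shows up in the library API: `PatchSet.can_patch()` only reports whether the hunks still match the file on disk, so a `False` answer can mean "already patched" just as well as "source has diverged". A minimal sketch (the patch filename below is a placeholder):

```
import patch

ps = patch.fromfile('fix.diff')   # hypothetical file; returns False if parsing fails
if ps:
    for p in ps:
        # can_patch() compares hunk context against the file on disk:
        #   True  - hunks match, the patch should apply cleanly
        #   False - hunks do not match (already patched or diverged source)
        #   None  - the file is not among the source names of this PatchSet
        print(p.source, ps.can_patch(p.source))
```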
+ +Only checksum can reliably detect if file is not patched, but in this case +linefeed differences or minor changes in file will mark file as +non-patchable, while in fact it could be possible if these minor changes +do not intersect with unified diff contents. This is the feature of +unified format - non-conlicting patches can be applied in different +order. Some conflicts can even be resolved automatically (not by this lib +so far). Version control systems actually do this during merges. + + +## Parsers overview ## + + +## Parser no.0 - Brute Force Line By Line Regexp ## + +Versions before 8.06. + +Initially the process was very straightforward. Main cycle reads one line +at a time from the input stream. Detects to which part of Diff it belongs +(free header, filename header, hunk) and parses it into corresponding data +structure. The line is discarded at the end of each cycle. This guarantees +no endless loops or recursions as long as input stream is finite. + +The parser code is one big `for` loop with a series of "parsing blocks" +at the root level. After a line is read at the loop start, each parsing +block then tested it with an `if` condition to see if it should process +the line. If condition matched the block then extracted text into Python +structure. Blocks can't request more lines, but they can use `continue` +command to start new cycle without waiting until the end of cycle (this +also prevented the line from being accidentally processed by blocks below). + +Testing the line with `if` had two major drawbacks: + + 1. regular expressions used in condition checks made the code obscure + 1. line could be intercepted by wrong block, and an extra effort required to place blocks in proper order + +To illustrate second problem take lines starting with "+" or "-" for +example. They should be parsed differently depending on where in diff they +are located - in header block they are just usual lines that are not +parsed at all, but for hunk they are the main data. + +## Parser no.1 - Line By Line State Machine ## + +Versions 8.06 up to 10.04. + +To make parser code more clear, regular expression checks were replaced +with checks of state variables. These local boolean variables were named +`header`, `filenames`, `hunkhead`, `hunkbody` after the regions in unified +diff format. Only one variable is set to true at any given time, so parser +is said to be in one given state at any moment. This made debug process +significantly easier. + +After this change `if`s at the top level of main cycle started to check +state variables instead of probing line content to delegate line processing +into their parsing block. When block finishes processing, it is responsible +for switching state to the next one. Sometime the next state should be +chosen from several possible alternatives, and parsing block needs content +of the following line to make the decision. As the line is discarded at the +end of cycle - blocks are still required to be placed in proper order. + +Main cycle turned to be more readable, but checks at the end of parsing +blocks become more sophisticated. + +So, while state machine isolated parsing blocks from stealing lines from +eash other, it still has drawbacks: + 1. blocks should be placed in proper order + 1. parsing blocks that make a decision when switching state should know about lines (i.e. context) of successor blocks + 1. state checks are made for every input line, because block can't request more lines in the middle of input cycle + +Let's not forget benefits: + 1. 
debug is easier, code is more readable + 1. parser is still non-recursive + 1. no risk of endless loops + +This state machine allowed to introduce new `hunkskip` state for recovery +from corrupted or invalid hunk. When such hunk is encountered - parser +switches to `hunkskip` state and skips input lines until it finds header of +the next hunk or filenames section. It appeared that the same check is done +when hunk ends as usual, so the state check after hunk was delegated to +`hunkskip` block as well. + +The parsing block for `hunkskip` state doesn't actually parse any data - it +exists solely for making branching decision. Until 'hunkskip' state primary +purpose of blocks in main cycle was extracting data, that's why they were +named "parsing blocks", but 'hunkskip' introduced new class of blocks that +can be named "decider blocks". + +The order of `hunkskip` is after `hunkbody` parser and before `filenames` +and `hunkhead` parsing blocks. This guarantees that these blocks get their +line after the state switch and before the line is discarded. + +``` +TODO: describe "missing line" problem in state recovery with two + interleaving blocks, when it is impossible to choose which block + should be placed ahead of the other +``` + +This line by line parser with lines that "fall through" arranged parsing +blocks may be the fastest possible implementation. There are no calls, no +repeated checks after state switching. But the code is still hard to read +and extend due to these implicit arrangements. This can be a minor issue +though as unified diff format is simple and such optimizations could be +the way to go in the future. + + +To summarize: + * static code analysis is easier thanks to states instead of regexps for branching execution; + * state checks are run for every line of the input stream (blocks can't request more lines); + * blocks are not explicitly chained; + * but still arranged in specific order; + * every parsing block knows to which state it should switch after processing; + * this makes state switching complicated when there is choice; + * because block should fetch the next line to make a decision; + * line should be processed by the right block until the end of the cycle; + * if it is impossible to rearrange blocks in the appropriate order then a "decider block" is needed (e.g `hunkskip`). + +Absence of function calls should speed up things a little. Function calls +could make block chaining explicit, but this exposes parser to stack +depletion problem. It is not actual for this specific parser, where amount +of parsed data in memory is bigger anyway, but it is still worthy to keep +this non-recursive and stackless. + +Development of parser is still complicated, because you need to keep in +mind the correct order of blocks while making modifications, and know about +interleaving blocks corner case to be able to manually detect them before +they hack your mind. + + +## Parser no.2 - Block Context ## + +Versions + +The need to extend patch parser for processing Mercurial and Git formats +required changes to allow easy extension without sacrificing non-recursive +non-looping behavior. The first enhancement was to allow parser blocks +fetch lines from input stream directly without waiting for the line in the +main cycle. Block reads as many lines as it needs, and after that switches +state. All lines that belong to this block are called "block context" and +are not exposed to main `if` cycle. This way there are less chances that +line could be intercepted by the wrong block. 
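A minimal sketch of this "fetch your own lines" idea (deliberately simplified, not the actual parser code): the header block consumes lines itself and stops on the first line that belongs to the next block's context.

```
def parse(lines):
    it = iter(lines)
    line = next(it, None)
    header, filenames = [], []
    state = 'headscan'
    while line is not None:
        if state == 'headscan':
            # block context: keep reading lines until one that is not ours
            while line is not None and not line.startswith('--- '):
                header.append(line)
                line = next(it, None)
            # the "--- " line is left in place for the next block to pick up
            state = 'filescan'
        elif state == 'filescan':
            filenames.append(line[4:].rstrip())
            line = next(it, None)
            state = 'headscan'   # the real parser goes on to "+++" and hunks
    return header, filenames
```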
+ +This feature can be called "isolation of block context". + +While it sounds good, block context can not be fully separated. When block +doesn't know how many lines it needs it just reads the input until a line +out of context is encountered. This line already belongs to another "block +context". Current block switches state to pass control to owner, but the +owner need to catch the line before it is discarded at the end of cycle. + +For example, when header parser reads line that starts with "--- " - it +should switch state and pass this line to the owner - filename parser. +The filename parser should be called before the end of the cycle to avoid +line being overwritten. In case of `if`s structure that means owner's +block check should be located under part of the code that switched state. + +So each block still knows about the next state or states. It should make a +decision where to switch next. Hence it should know about "block contexts" +of its successors. This bloats and complicates code. + +To simplify the code, it is possible to prevent blocks from analysing +each other's context for making switch. The block should only check that +line doesn't belong to its context and switch state to pass control +further to "decider's block", which in turn make the proper state switch. + +"block context separation" is another feature of Parser no.2. + +It's not a complete separation in a sense that header parser still knows +that line starting with "--- " doesn't belong to it. It switches state by +setting `headscan` to False and `filescan` to True and that's all. No +parsing block makes assumptions or decisions where to pass the processing. +There is only one way to switch from parsing block. If there should be a +decision what is to be parsed next, then this decision should be made by +"decider's block" that doesn't parse, but just analyzes context to make a +switch. The problem with arranging pieces in correct order still persists. + +To solve rearrangement problem it is necessary to either reinvent GOTO or +to be able to reinsert analyzed line back into stream for fetching in +the next cycle after state had already been switched. + +Rearrangement problem can be illustrated with two blocks that analyze each +others context to pass control to each other. It is impossible to place +them in the correct order in main cycle, because at the end of processing +the current block should always be higher than the other to pass the line. + +The solution can be in: + * skip fetching line on the next cycle + * add buffer for discarded lines + * reinsert line into the stream + * wrapper switch that judges who gets the next line, this requires one more state variable, and processing lines one by one from the main cycle + +Reinserting lines can provide some performance overhead, wrapper switch +complicates parser, so skip line fetching may be a good solution. + + +So, this parser overcomes strict requirement when line should processed in +the same cycle to avoid being overwritten. Every block requests as many +lines as it needs, switches state to "finished" and returns control to the +beginning of the main cycle. Main cycle analyzes state and passes control +to the next appropriate block. As a side effect there is now a line number +that can be used for error messages and debugging. \ No newline at end of file diff --git a/README.md b/README.md index 6cca3fd..57945c9 100644 --- a/README.md +++ b/README.md @@ -1,54 +1,57 @@ -Library to parse and apply unified diffs. 
+## Status ## -[![Build Status](https://img.shields.io/travis/techtonik/python-patch/master)](https://travis-ci.org/techtonik/python-patch/branches) [![PyPI](https://img.shields.io/pypi/v/patch)](https://pypi.python.org/pypi/patch) +[![](https://drone.io/techtonik/python-patch/status.png)](https://drone.io/techtonik/python-patch) -### Features +**API status**: API is unstable, so **use strict dependencies on major +version number** when using this tool as a library. - * Python 2 and 3 compatible - * Automatic correction of - * Linefeeds according to patched file - * Diffs broken by stripping trailing whitespace - * a/ and b/ prefixes - * Single file, which is a command line tool and a library - * No dependencies outside Python stdlib - * Patch format detection (SVN, HG, GIT) - * Nice diffstat histogram - * Linux / Windows / OS X - * Test coverage +It understands only _unified diffs_. Currently it doesn't support file +renames, creation and removals. -Things that don't work out of the box: +Note that **patch.py** was not designed to reproduce original files. Parsing +is a lossy process where data is normalized to be cross-platform. Absolute +paths are stripped as well as references to parent directories, backslashes +are converted to forward slashes and so on. - * File renaming, creation and removal - * Directory tree operations - * Version control specific properties - * Non-unified diff formats +**patch.py** is designed to transparently handle line end differences. Line +endings from patch are converted into +best suitable format for patched file. patch.py scans line endings in source +file, and if they are consistent - lines from patch are applied with the +same ending. If source linefeeds are inconsistend - lines from patch are +applied "as is". -### Usage +Parsing of diff is done in a in very straightforward manner as an exercise +to approach the problem of parsing on my own before learning the 'proper +ways'. Thanks creators, _the format of unified diff_ is rather simple (an +illustration of Subversion style unified diff is included in +[source doc/](http://python-patch.googlecode.com/svn/trunk/doc/) directory). -Download **patch.py** and run it with Python. It is a self-contained -module without external dependencies. +## Library usage ## - patch.py diff.patch +See [APIUseCases](APIUseCases.md). -You can also run the .zip file. - - python patch-1.16.zip diff.patch +## Future ## -### Installation +Patch utility in Python makes it possible to implement online "submit, +review and apply" module. Similar to [Review Board](http://www.reviewboard.org/) +for code, but suitable for all kind of textual content that uses +unified diffs as an interchange format between users, website, and version +control system. With this system patches can be applied after on site +review, automatically storing the names of patch contributors in SVN +history logs without requiring write access for these contributors. This +system is not the scope of this project though. -**patch.py** is self sufficient. You can copy it into your repository -and use it from here. This setup will always be repeatable. 
But if -you need to add `patch` module as a dependency, make sure to use strict -specifiers to avoid hitting an API break when version 2 is released: +Additional unified diff parsers may be added in future to compare different +parsing techniques (with [pyparsing](http://pyparsing.wikispaces.com/), +[SPARK](http://www.ibm.com/developerworks/library/l-spark.html) or +[others](http://www.google.com/Top/Computers/Programming/Languages/Python/Modules/Text_Processing/) +as example). - pip install "patch==1.*" +See also https://code.google.com/p/rainforce/wiki/ModerationQueue - -### Other stuff - -* [CHANGES](doc/CHANGES.md) -* [LICENSE](doc/LICENSE) -* [CREDITS](doc/CREDITS) - -* [test coverage](http://techtonik.github.io/python-patch/tests/coverage/) +It would be nice to further simplify parser, make it more modular to allow easy +customization and extension, but the primary focus for now is to figure out +an API that will make it usable as a library. There is separate TODO item to +check behavior of "\ No newline at end of file" cases. Other goals is to +expand test coverage, and try to make script more interactive. diff --git a/doc/ADOPTIONS.md b/doc/ADOPTIONS.md deleted file mode 100644 index bbc4ea0..0000000 --- a/doc/ADOPTIONS.md +++ /dev/null @@ -1,9 +0,0 @@ -| Project | Description | patch.py version | Reviewed | -|:--------|:------------|:-----------------|:---------| -| [conda-recipes](https://github.com/conda/conda-recipes/tree/master/python-patch)| conda package | [1.12.11](https://github.com/conda/conda-recipes/blob/master/python-patch/patch.py) | 2016-01-17 | -| [collective.recipe.patch](https://pypi.python.org/pypi/collective.recipe.patch/0.2.2) | buildout recipe for patching eggs | [8.06-1+](https://github.com/garbas/collective.recipe.patch/blob/master/collective/recipe/patch/patch.py) | 2014-01-17 | -| [Linux Kernel Backports](https://backports.wiki.kernel.org/index.php/Documentation) | backporting Linux upstream device drivers for usage on older kernels | [1.12.12dev+](https://git.kernel.org/cgit/linux/kernel/git/backports/backports.git/tree/lib/patch.py) | 2014-01-17 | -| [LuaPatch](http://lua-users.org/wiki/LuaPatch) | rewrite of patch.py for Lua by David Manura | 8.06-1| 2014-01-17 | -| [OpenHatch](https://openhatch.org/) | help wannabe open source developers find interesting projects | [10.04-2+](https://github.com/openhatch/oh-mainline/blob/master/vendor/packages/python-patch/patch.py) | 2014-01-17 | -| [nose](https://nose.readthedocs.org/en/latest/) | `nose` extends unittest to make testing easier | [10.04-2+](https://github.com/nose-devs/nose/blob/master/patch.py) | 2014-01-17 | -| [pypatch](https://pypi.python.org/pypi/pypatch/0.5.1) | automatically patch installed python modules | 1.12.11 | 2014-01-17 | diff --git a/doc/CHANGES.md b/doc/CHANGES.md deleted file mode 100644 index 418ed24..0000000 --- a/doc/CHANGES.md +++ /dev/null @@ -1,126 +0,0 @@ -##### 1.16 - - - Python 3 support, thanks to Yen Chi Hsuan (@yan12125) - (pull request #36) - -##### 1.15 - - - Project moved to GitHub - - patch-1.15.zip archive is now executable - - improved Git patch detection thanks to @mspncp (#32) - - tests/data contains database of all possible patches - - tests suite scan now processes tests/data automatically - - API changes: - + setdebug() initializes logging and enables debug info - -##### 1.14.2 - - - --revert option to apply patches in reverse order (unpatch) - - support for broken patches generated by online Google Code editor - - API changes: - + PatchSet and Patch objects 
are now iterable - + new PatchSet.findfile() contains logic detecting filename to patch - + PatchSet.revert() - - make directory based tests easier to create and run manually - - fix xnormpath with Windows paths on Linux - (issue #24, found by Philippe Ombredanne) - -##### 1.13 - - - diffstat output now also shows size delta in bytes - - added --directory (-d) option to specify root when applying patches - - hunk headers produced by `diff -p` option are now parsed and accessible - (issue #22, found by Philippe Ombredanne) - - API changes: - + Hunk.desc field to access hunk headers content - + PatchSet.apply() gets `root` keyword argument for the working dir - when applying patches (issue #7) - - improve error message for missing files - - improve docs (fix issue #5) - -##### 1.12.11 Major API Break - - - patch.py can read patches from stdin - - patch.py can show nice histogram with --diffstat option - - added detection of SVN, GIT and HG patch types, unrecognized - patches marked as PLAIN - - added error reporting for parsing functions and helpers (now they - return False if parsing failed) - make sure you handle this correctly - - added normalization to filenames to protect against patching files - using absolute paths or files in parent directories - - test run patch.py on all patches submitted to Python bug tracker, which - resulted in improved parsing and error handling for some corner cases - - improved logging - - API changes - * fromfile(), fromstring() and fromurl() now return False on errors - * previous Patch is renamed to PatchSet, new Patch is single file entry - * Patch.header is now a list of strings - * PatchSet.parse() now returns True if parsing completed without errors - + PatchSet.__len__() - + PatchSet.diffstat() - + PatchSet.type and Patch.type - + PatchSet.errors and - + xisabs() cross-platform version of `os.path.isabs()` - + xnormpath() forward slashed version of `os.path.normpath()` - + xstrip() to strip absolute path prefixes - -##### 11.01 - - - patch.py can read patches from web - - patch.py returns -1 if there were errors during patching - - store patch headers (necessary for future DIFF/SVN/HG/GIT detection) - - report about extra bytes at the end after patch is parsed - - API changes - + fromurl() - * Patch.apply() now returns True on success - -##### 10.11 - - - fixed fromstring() failure due to invalid StringIO import (issue #9) - (thanks john.stumpo for reporting) - - added --verbose and --quiet options - - improved message logging - - change "successfully patched..." 
message to INFO instead of WARN - (thanks Alex Stewart for reporting and patch) - - skip __main__ imports when used as a library (patch by Alex Stewart) - - API changes - * renamed class HunkInfo to Hunk - + Patch.type placeholder (no detection yet - parser is not ready) - + constants for patch types DIFF/PLAIN, HG/MERCURIAL, SVN/SUBVERSION - + Patch.header for saving headers which can be used later to extract - additional meta information such as commit message - - internal: improving parser speed by allowing blocks fetch lines on - demand - - test suite improvements - -##### 10.04 - - - renamed debug option to --debug - - API changes - * method names are now underscored for consistency with difflib - + addded Patch.can_patch(filename) to test if source file is in list - of source filenames and can be patched - * use designated logger "python_patch" instead of default - -##### 9.08-2 - - - compatibility fix for Python 2.4 - -##### 9.08-1 - - - fixed issue #2 - remove trailing whitespaces from filename - (thanks James from Twisted Fish) - - API changes - + added Patch and HunkInfo classes - * moved utility methods into Patch - + build Patch object by specifying stream to constructor - or use top level functions fromfile() and fromstring() - - added test suite - -##### 8.06-2 - - - compatibility fix for Python 2.4 - -##### 8.06-1 - - - initial release diff --git a/doc/CREDITS b/doc/CREDITS deleted file mode 100644 index 16cb9d7..0000000 --- a/doc/CREDITS +++ /dev/null @@ -1,10 +0,0 @@ -I'd like to thank the following people who contributed to -development of this library: - - -Alex Stewart -Wladimir J. van der Laan (laanwj) -azasypkin -Philippe Ombredanne -mspncp -Yen Chi Hsuan (@yan12125) diff --git a/doc/HISTORY b/doc/HISTORY deleted file mode 100644 index 636a26d..0000000 --- a/doc/HISTORY +++ /dev/null @@ -1,16 +0,0 @@ -In 2008 there was no reliable Windows tool to apply patches, -and there was no cross-platform solution that could be safely -run by web server process. - -(UNIX *patch* utility)[http://www.gnu.org/software/patch/] was -(ported to windows)[http://gnuwin32.sourceforge.net/packages/patch.htm], -but there were (a couple of bugs) -[http://www.google.com/search?q=Assertion+failed%3A+hunk%2C+file+patch.c] -that proved that it can not be run securely in web server process. -The utility was also hard to tweak without a C compiler, it messed -badly with LF and CRLF line end differences, and so this project -was born. - -*patch.py* was meant to be a cross-platoform tool with intuitive -defaults, taking care of the most problems (e.g. line end -differences) automatically. diff --git a/doc/LICENSE b/doc/LICENSE deleted file mode 100644 index e172f7a..0000000 --- a/doc/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -MIT License ------------ - -Copyright (c) 2008-2016 anatoly techtonik - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/doc/RELEASING b/doc/RELEASING deleted file mode 100644 index f367207..0000000 --- a/doc/RELEASING +++ /dev/null @@ -1,19 +0,0 @@ -* [ ] Pack .zip archive - - pip install pypack - python -m pypack patch.py - -* [ ] Write changelog - -* [ ] Upload archive to PyPI (manually for now) - * [ ] Create new version https://pypi.python.org/pypi?%3Aaction=submit_form&name=patch - * [ ] Upload .zip for this version - -* [ ] Update PyPI description - * [ ] Download PKG-INFO - * [ ] Edit and upload - -* [ ] Tag release - - git tag -a - git push --follow-tags diff --git a/doc/evolution-notes.txt b/doc/evolution-notes.txt deleted file mode 100644 index 7c544c2..0000000 --- a/doc/evolution-notes.txt +++ /dev/null @@ -1,34 +0,0 @@ -patchset evolution - -(diff era) -1. change some content in a stream -1. change some lines in a file -1. protect change with context -1. change several files - -(git diff era) -2. create file -2. rename file -2. move file -2. copy file -2. copy and rename -2. move and rename -2. remove file - -3. know file attributes -3. know file mime-type -3. know file binary/text -3. change file attributes - -(2D patch jump) -4. create directory -4. rename directory -4. move directory -4. copy directory -4. copy and rename -4. move and rename - -5. know directory contents -5. record directory tree in 1D structure -5. 
record changes for 2D structure in 1D structure - diff --git a/doc/unified_diff_svn.png b/doc/unified_diff_svn.png deleted file mode 100644 index 77aea61..0000000 Binary files a/doc/unified_diff_svn.png and /dev/null differ diff --git a/doc/unified_diff_svn.svg b/doc/unified_diff_svn.svg deleted file mode 100644 index e9e794b..0000000 --- a/doc/unified_diff_svn.svg +++ /dev/null @@ -1,1461 +0,0 @@ - - - - - - - - image/svg+xml - - - - - - - - - - - - - - - - - - - - - - Unified Diff/Patch Format (Subversion) - - - comments are ignored - - - - the first file thatexists is used - - - - may contain severalhunks for each file - - - - -line_from,total_before+line_after,total_after - - - - --- filename \t comment - - - - line ends may differ - - - - for the format like-line_from +line_aftertotal_xxx = 1 - - - - - "\ No newline at end of file" marker is used iffile ends without newline - - - http://en.wikipedia.org/wiki/Diff#Unified_format - http://techtonik.rainforce.org - Index: src/plugins/contrib/devpak_plugin/updatedlg.cpp===================================================================--- src/plugins/contrib/devpak_plugin/updatedlg.cpp (revision 5106)+++ src/plugins/contrib/devpak_plugin/updatedlg.cpp (working copy)@@ -94,11 +94,13 @@ lst->InsertColumn(1, _("Version")); lst->InsertColumn(2, _("Installed")); lst->InsertColumn(3, _("Size"), wxLIST_FORMAT_RIGHT);+ lst->InsertColumn(4, _("Rev")); - lst->SetColumnWidth(0, lst->GetSize().x - (64 * 3) - 2); // 1st column takes all remaining space+ lst->SetColumnWidth(0, lst->GetSize().x - (64 * 3 + 40) - 6 ); // 1st column takes all remaining space lst->SetColumnWidth(1, 64); lst->SetColumnWidth(2, 64); lst->SetColumnWidth(3, 64);+ lst->SetColumnWidth(4, 40); } void UpdateDlg::AddRecordToList(UpdateRec* rec)@@ -111,8 +113,20 @@ lst->SetItem(idx, 1, rec->version); lst->SetItem(idx, 2, rec->installed_version); lst->SetItem(idx, 3, rec->size);+ lst->SetItem(idx, 4, rec->revision); } +wxString UpdateDlg::GetListColumnText(int idx, int col) {+ wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl);+ int index = idx == -1 ? 
lst->GetNextItem(-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED) : idx;+ wxListItem info;+ info.SetId(index);+ info.SetColumn(col);+ info.SetMask(wxLIST_MASK_TEXT);+ lst->GetItem(info);+ return info.GetText();+}+ void UpdateDlg::SetListColumnText(int idx, int col, const wxString& text) { wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl);@@ -393,7 +407,9 @@ if (index == -1) return 0; wxString title = lst->GetItemText(index);- return FindRecByTitle(title, m_Recs, m_RecsCount);+ wxString version = GetListColumnText(index, 1);+ wxString revision = GetListColumnText(index, 4);+ return FindRec(title, version, revision, m_Recs, m_RecsCount); } void UpdateDlg::DownloadFile(bool dontInstall)Index: src/plugins/contrib/devpak_plugin/manifest.xml===================================================================--- src/plugins/contrib/devpak_plugin/manifest.xml (revision 5106)+++ src/plugins/contrib/devpak_plugin/manifest.xml (working copy)@@ -2,18 +2,19 @@ <CodeBlocks_plugin_manifest_file> <SdkVersion major="1" minor="10" release="0" /> <Plugin name="DevPakUpdater">- <Value title="Dev-C++ DevPak updater/installer" />- <Value version="0.1" />+ <Value title="DevPak updater/installer" />+ <Value version="0.2" /> <Value description="Installs selected DevPaks from the Internet" /> <Value author="Yiannis Mandravellos" /> <Value authorEmail="info@codeblocks.org" /> <Value authorWebsite="http://www.codeblocks.org/" /> <Value thanksTo="Dev-C++ community.- Julian R Seward for libbzip2.- libbzip2 copyright notice:- bzip2 and associated library libbzip2, are- copyright (C) 1996-2000 Julian R Seward.- All rights reserved." />+ Julian R Seward for libbzip2.++ libbzip2 copyright notice:+ bzip2 and associated library libbzip2, are+ copyright (C) 1996-2000 Julian R Seward.+ All rights reserved." /> <Value license="GPL" /> </Plugin>-</CodeBlocks_plugin_manifest_file>+</CodeBlocks_plugin_manifest_file>\ No newline at end of file - - - - - - - line numbers start from 1 - - - diff --git a/patch.py b/patch.py deleted file mode 100755 index 4775d70..0000000 --- a/patch.py +++ /dev/null @@ -1,1203 +0,0 @@ -#!/usr/bin/env python -""" - Patch utility to apply unified diffs - - Brute-force line-by-line non-recursive parsing - - Copyright (c) 2008-2016 anatoly techtonik - Available under the terms of MIT license - -""" -from __future__ import print_function - -__author__ = "anatoly techtonik " -__version__ = "1.16" -__license__ = "MIT" -__url__ = "https://github.com/techtonik/python-patch" - -import copy -import logging -import re - -# cStringIO doesn't support unicode in 2.5 -try: - from StringIO import StringIO -except ImportError: - from io import BytesIO as StringIO # python 3 -try: - import urllib2 as urllib_request -except ImportError: - import urllib.request as urllib_request - -from os.path import exists, isfile, abspath -import os -import posixpath -import shutil -import sys - - -PY3K = sys.version_info >= (3, 0) - -# PEP 3114 -if not PY3K: - compat_next = lambda gen: gen.next() -else: - compat_next = lambda gen: gen.__next__() - -def tostr(b): - """ Python 3 bytes encoder. Used to print filename in - diffstat output. Assumes that filenames are in utf-8. - """ - if not PY3K: - return b - - # [ ] figure out how to print non-utf-8 filenames without - # information loss - return b.decode('utf-8') - - -#------------------------------------------------ -# Logging is controlled by logger named after the -# module name (e.g. 
'patch' for patch.py module) - -logger = logging.getLogger(__name__) - -debug = logger.debug -info = logger.info -warning = logger.warning - -class NullHandler(logging.Handler): - """ Copied from Python 2.7 to avoid getting - `No handlers could be found for logger "patch"` - http://bugs.python.org/issue16539 - """ - def handle(self, record): - pass - def emit(self, record): - pass - def createLock(self): - self.lock = None - -streamhandler = logging.StreamHandler() - -# initialize logger itself -logger.addHandler(NullHandler()) - -debugmode = False - -def setdebug(): - global debugmode, streamhandler - - debugmode = True - loglevel = logging.DEBUG - logformat = "%(levelname)8s %(message)s" - logger.setLevel(loglevel) - - if streamhandler not in logger.handlers: - # when used as a library, streamhandler is not added - # by default - logger.addHandler(streamhandler) - - streamhandler.setFormatter(logging.Formatter(logformat)) - - -#------------------------------------------------ -# Constants for Patch/PatchSet types - -DIFF = PLAIN = "plain" -GIT = "git" -HG = MERCURIAL = "mercurial" -SVN = SUBVERSION = "svn" -# mixed type is only actual when PatchSet contains -# Patches of different type -MIXED = MIXED = "mixed" - - -#------------------------------------------------ -# Helpers (these could come with Python stdlib) - -# x...() function are used to work with paths in -# cross-platform manner - all paths use forward -# slashes even on Windows. - -def xisabs(filename): - """ Cross-platform version of `os.path.isabs()` - Returns True if `filename` is absolute on - Linux, OS X or Windows. - """ - if filename.startswith(b'/'): # Linux/Unix - return True - elif filename.startswith(b'\\'): # Windows - return True - elif re.match(b'\\w:[\\\\/]', filename): # Windows - return True - return False - -def xnormpath(path): - """ Cross-platform version of os.path.normpath """ - # replace escapes and Windows slashes - normalized = posixpath.normpath(path).replace(b'\\', b'/') - # fold the result - return posixpath.normpath(normalized) - -def xstrip(filename): - """ Make relative path out of absolute by stripping - prefixes used on Linux, OS X and Windows. - - This function is critical for security. - """ - while xisabs(filename): - # strip windows drive with all slashes - if re.match(b'\\w:[\\\\/]', filename): - filename = re.sub(b'^\\w+:[\\\\/]+', b'', filename) - # strip all slashes - elif re.match(b'[\\\\/]', filename): - filename = re.sub(b'^[\\\\/]+', b'', filename) - return filename - -#----------------------------------------------- -# Main API functions - -def fromfile(filename): - """ Parse patch file. If successful, returns - PatchSet() object. Otherwise returns False. - """ - patchset = PatchSet() - debug("reading %s" % filename) - fp = open(filename, "rb") - res = patchset.parse(fp) - fp.close() - if res == True: - return patchset - return False - - -def fromstring(s): - """ Parse text string and return PatchSet() - object (or False if parsing fails) - """ - ps = PatchSet( StringIO(s) ) - if ps.errors == 0: - return ps - return False - - -def fromurl(url): - """ Parse patch from an URL, return False - if an error occured. Note that this also - can throw urlopen() exceptions. 
- """ - ps = PatchSet( urllib_request.urlopen(url) ) - if ps.errors == 0: - return ps - return False - - -# --- Utility functions --- -# [ ] reuse more universal pathsplit() -def pathstrip(path, n): - """ Strip n leading components from the given path """ - pathlist = [path] - while os.path.dirname(pathlist[0]) != b'': - pathlist[0:1] = os.path.split(pathlist[0]) - return b'/'.join(pathlist[n:]) -# --- /Utility function --- - - -class Hunk(object): - """ Parsed hunk data container (hunk starts with @@ -R +R @@) """ - - def __init__(self): - self.startsrc=None #: line count starts with 1 - self.linessrc=None - self.starttgt=None - self.linestgt=None - self.invalid=False - self.desc='' - self.text=[] - -# def apply(self, estream): -# """ write hunk data into enumerable stream -# return strings one by one until hunk is -# over -# -# enumerable stream are tuples (lineno, line) -# where lineno starts with 0 -# """ -# pass - - -class Patch(object): - """ Patch for a single file. - If used as an iterable, returns hunks. - """ - def __init__(self): - self.source = None - self.target = None - self.hunks = [] - self.hunkends = [] - self.header = [] - - self.type = None - - def __iter__(self): - for h in self.hunks: - yield h - - -class PatchSet(object): - """ PatchSet is a patch parser and container. - When used as an iterable, returns patches. - """ - - def __init__(self, stream=None): - # --- API accessible fields --- - - # name of the PatchSet (filename or ...) - self.name = None - # patch set type - one of constants - self.type = None - - # list of Patch objects - self.items = [] - - self.errors = 0 # fatal parsing errors - self.warnings = 0 # non-critical warnings - # --- /API --- - - if stream: - self.parse(stream) - - def __len__(self): - return len(self.items) - - def __iter__(self): - for i in self.items: - yield i - - def parse(self, stream): - """ parse unified diff - return True on success - """ - lineends = dict(lf=0, crlf=0, cr=0) - nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1 - - p = None - hunk = None - # hunkactual variable is used to calculate hunk lines for comparison - hunkactual = dict(linessrc=None, linestgt=None) - - - class wrapumerate(enumerate): - """Enumerate wrapper that uses boolean end of stream status instead of - StopIteration exception, and properties to access line information. 
- """ - - def __init__(self, *args, **kwargs): - # we don't call parent, it is magically created by __new__ method - - self._exhausted = False - self._lineno = False # after end of stream equal to the num of lines - self._line = False # will be reset to False after end of stream - - def next(self): - """Try to read the next line and return True if it is available, - False if end of stream is reached.""" - if self._exhausted: - return False - - try: - self._lineno, self._line = compat_next(super(wrapumerate, self)) - except StopIteration: - self._exhausted = True - self._line = False - return False - return True - - @property - def is_empty(self): - return self._exhausted - - @property - def line(self): - return self._line - - @property - def lineno(self): - return self._lineno - - # define states (possible file regions) that direct parse flow - headscan = True # start with scanning header - filenames = False # lines starting with --- and +++ - - hunkhead = False # @@ -R +R @@ sequence - hunkbody = False # - hunkskip = False # skipping invalid hunk mode - - hunkparsed = False # state after successfully parsed hunk - - # regexp to match start of hunk, used groups - 1,3,4,6 - re_hunk_start = re.compile(br"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@") - - self.errors = 0 - # temp buffers for header and filenames info - header = [] - srcname = None - tgtname = None - - # start of main cycle - # each parsing block already has line available in fe.line - fe = wrapumerate(stream) - while fe.next(): - - # -- deciders: these only switch state to decide who should process - # -- line fetched at the start of this cycle - if hunkparsed: - hunkparsed = False - if re_hunk_start.match(fe.line): - hunkhead = True - elif fe.line.startswith(b"--- "): - filenames = True - else: - headscan = True - # -- ------------------------------------ - - # read out header - if headscan: - while not fe.is_empty and not fe.line.startswith(b"--- "): - header.append(fe.line) - fe.next() - if fe.is_empty: - if p == None: - debug("no patch data found") # error is shown later - self.errors += 1 - else: - info("%d unparsed bytes left at the end of stream" % len(b''.join(header))) - self.warnings += 1 - # TODO check for \No new line at the end.. 
- # TODO test for unparsed bytes - # otherwise error += 1 - # this is actually a loop exit - continue - - headscan = False - # switch to filenames state - filenames = True - - line = fe.line - lineno = fe.lineno - - - # hunkskip and hunkbody code skipped until definition of hunkhead is parsed - if hunkbody: - # [x] treat empty lines inside hunks as containing single space - # (this happens when diff is saved by copy/pasting to editor - # that strips trailing whitespace) - if line.strip(b"\r\n") == b"": - debug("expanding empty line in a middle of hunk body") - self.warnings += 1 - line = b' ' + line - - # process line first - if re.match(b"^[- \\+\\\\]", line): - # gather stats about line endings - if line.endswith(b"\r\n"): - p.hunkends["crlf"] += 1 - elif line.endswith(b"\n"): - p.hunkends["lf"] += 1 - elif line.endswith(b"\r"): - p.hunkends["cr"] += 1 - - if line.startswith(b"-"): - hunkactual["linessrc"] += 1 - elif line.startswith(b"+"): - hunkactual["linestgt"] += 1 - elif not line.startswith(b"\\"): - hunkactual["linessrc"] += 1 - hunkactual["linestgt"] += 1 - hunk.text.append(line) - # todo: handle \ No newline cases - else: - warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, p.target)) - # add hunk status node - hunk.invalid = True - p.hunks.append(hunk) - self.errors += 1 - # switch to hunkskip state - hunkbody = False - hunkskip = True - - # check exit conditions - if hunkactual["linessrc"] > hunk.linessrc or hunkactual["linestgt"] > hunk.linestgt: - warning("extra lines for hunk no.%d at %d for target %s" % (nexthunkno, lineno+1, p.target)) - # add hunk status node - hunk.invalid = True - p.hunks.append(hunk) - self.errors += 1 - # switch to hunkskip state - hunkbody = False - hunkskip = True - elif hunk.linessrc == hunkactual["linessrc"] and hunk.linestgt == hunkactual["linestgt"]: - # hunk parsed successfully - p.hunks.append(hunk) - # switch to hunkparsed state - hunkbody = False - hunkparsed = True - - # detect mixed window/unix line ends - ends = p.hunkends - if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1: - warning("inconsistent line ends in patch hunks for %s" % p.source) - self.warnings += 1 - if debugmode: - debuglines = dict(ends) - debuglines.update(file=p.target, hunk=nexthunkno) - debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines) - # fetch next line - continue - - if hunkskip: - if re_hunk_start.match(line): - # switch to hunkhead state - hunkskip = False - hunkhead = True - elif line.startswith(b"--- "): - # switch to filenames state - hunkskip = False - filenames = True - if debugmode and len(self.items) > 0: - debug("- %2d hunks for %s" % (len(p.hunks), p.source)) - - if filenames: - if line.startswith(b"--- "): - if srcname != None: - # XXX testcase - warning("skipping false patch for %s" % srcname) - srcname = None - # XXX header += srcname - # double source filename line is encountered - # attempt to restart from this second line - re_filename = b"^--- ([^\t]+)" - match = re.match(re_filename, line) - # todo: support spaces in filenames - if match: - srcname = match.group(1).strip() - else: - warning("skipping invalid filename at line %d" % (lineno+1)) - self.errors += 1 - # XXX p.header += line - # switch back to headscan state - filenames = False - headscan = True - elif not line.startswith(b"+++ "): - if srcname != None: - warning("skipping invalid patch with no target for %s" % srcname) - self.errors += 1 - srcname = None - # XXX header += srcname - # XXX header += 
line - else: - # this should be unreachable - warning("skipping invalid target patch") - filenames = False - headscan = True - else: - if tgtname != None: - # XXX seems to be a dead branch - warning("skipping invalid patch - double target at line %d" % (lineno+1)) - self.errors += 1 - srcname = None - tgtname = None - # XXX header += srcname - # XXX header += tgtname - # XXX header += line - # double target filename line is encountered - # switch back to headscan state - filenames = False - headscan = True - else: - re_filename = br"^\+\+\+ ([^\t]+)" - match = re.match(re_filename, line) - if not match: - warning("skipping invalid patch - no target filename at line %d" % (lineno+1)) - self.errors += 1 - srcname = None - # switch back to headscan state - filenames = False - headscan = True - else: - if p: # for the first run p is None - self.items.append(p) - p = Patch() - p.source = srcname - srcname = None - p.target = match.group(1).strip() - p.header = header - header = [] - # switch to hunkhead state - filenames = False - hunkhead = True - nexthunkno = 0 - p.hunkends = lineends.copy() - continue - - if hunkhead: - match = re.match(br"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@(.*)", line) - if not match: - if not p.hunks: - warning("skipping invalid patch with no hunks for file %s" % p.source) - self.errors += 1 - # XXX review switch - # switch to headscan state - hunkhead = False - headscan = True - continue - else: - # TODO review condition case - # switch to headscan state - hunkhead = False - headscan = True - else: - hunk = Hunk() - hunk.startsrc = int(match.group(1)) - hunk.linessrc = 1 - if match.group(3): hunk.linessrc = int(match.group(3)) - hunk.starttgt = int(match.group(4)) - hunk.linestgt = 1 - if match.group(6): hunk.linestgt = int(match.group(6)) - hunk.invalid = False - hunk.desc = match.group(7)[1:].rstrip() - hunk.text = [] - - hunkactual["linessrc"] = hunkactual["linestgt"] = 0 - - # switch to hunkbody state - hunkhead = False - hunkbody = True - nexthunkno += 1 - continue - - # /while fe.next() - - if p: - self.items.append(p) - - if not hunkparsed: - if hunkskip: - warning("warning: finished with errors, some hunks may be invalid") - elif headscan: - if len(self.items) == 0: - warning("error: no patch data found!") - return False - else: # extra data at the end of file - pass - else: - warning("error: patch stream is incomplete!") - self.errors += 1 - if len(self.items) == 0: - return False - - if debugmode and len(self.items) > 0: - debug("- %2d hunks for %s" % (len(p.hunks), p.source)) - - # XXX fix total hunks calculation - debug("total files: %d total hunks: %d" % (len(self.items), - sum(len(p.hunks) for p in self.items))) - - # ---- detect patch and patchset types ---- - for idx, p in enumerate(self.items): - self.items[idx].type = self._detect_type(p) - - types = set([p.type for p in self.items]) - if len(types) > 1: - self.type = MIXED - else: - self.type = types.pop() - # -------- - - self._normalize_filenames() - - return (self.errors == 0) - - def _detect_type(self, p): - """ detect and return type for the specified Patch object - analyzes header and filenames info - - NOTE: must be run before filenames are normalized - """ - - # check for SVN - # - header starts with Index: - # - next line is ===... 
delimiter - # - filename is followed by revision number - # TODO add SVN revision - if (len(p.header) > 1 and p.header[-2].startswith(b"Index: ") - and p.header[-1].startswith(b"="*67)): - return SVN - - # common checks for both HG and GIT - DVCS = ((p.source.startswith(b'a/') or p.source == b'/dev/null') - and (p.target.startswith(b'b/') or p.target == b'/dev/null')) - - # GIT type check - # - header[-2] is like "diff --git a/oldname b/newname" - # - header[-1] is like "index .. " - # TODO add git rename diffs and add/remove diffs - # add git diff with spaced filename - # TODO http://www.kernel.org/pub/software/scm/git/docs/git-diff.html - - # Git patch header len is 2 min - if len(p.header) > 1: - # detect the start of diff header - there might be some comments before - for idx in reversed(range(len(p.header))): - if p.header[idx].startswith(b"diff --git"): - break - if p.header[idx].startswith(b'diff --git a/'): - if (idx+1 < len(p.header) - and re.match(b'index \\w{7}..\\w{7} \\d{6}', p.header[idx+1])): - if DVCS: - return GIT - - # HG check - # - # - for plain HG format header is like "diff -r b2d9961ff1f5 filename" - # - for Git-style HG patches it is "diff --git a/oldname b/newname" - # - filename starts with a/, b/ or is equal to /dev/null - # - exported changesets also contain the header - # # HG changeset patch - # # User name@example.com - # ... - # TODO add MQ - # TODO add revision info - if len(p.header) > 0: - if DVCS and re.match(b'diff -r \\w{12} .*', p.header[-1]): - return HG - if DVCS and p.header[-1].startswith(b'diff --git a/'): - if len(p.header) == 1: # native Git patch header len is 2 - return HG - elif p.header[0].startswith(b'# HG changeset patch'): - return HG - - return PLAIN - - - def _normalize_filenames(self): - """ sanitize filenames, normalizing paths, i.e.: - 1. strip a/ and b/ prefixes from GIT and HG style patches - 2. remove all references to parent directories (with warning) - 3. translate any absolute paths to relative (with warning) - - [x] always use forward slashes to be crossplatform - (diff/patch were born as a unix utility after all) - - return None - """ - if debugmode: - debug("normalize filenames") - for i,p in enumerate(self.items): - if debugmode: - debug(" patch type = " + p.type) - debug(" source = " + p.source) - debug(" target = " + p.target) - if p.type in (HG, GIT): - # TODO: figure out how to deal with /dev/null entries - debug("stripping a/ and b/ prefixes") - if p.source != '/dev/null': - if not p.source.startswith(b"a/"): - warning("invalid source filename") - else: - p.source = p.source[2:] - if p.target != '/dev/null': - if not p.target.startswith(b"b/"): - warning("invalid target filename") - else: - p.target = p.target[2:] - - p.source = xnormpath(p.source) - p.target = xnormpath(p.target) - - sep = b'/' # sep value can be hardcoded, but it looks nice this way - - # references to parent are not allowed - if p.source.startswith(b".." + sep): - warning("error: stripping parent path for source file patch no.%d" % (i+1)) - self.warnings += 1 - while p.source.startswith(b".." + sep): - p.source = p.source.partition(sep)[2] - if p.target.startswith(b".." + sep): - warning("error: stripping parent path for target file patch no.%d" % (i+1)) - self.warnings += 1 - while p.target.startswith(b".." 
+ sep): - p.target = p.target.partition(sep)[2] - # absolute paths are not allowed - if xisabs(p.source) or xisabs(p.target): - warning("error: absolute paths are not allowed - file no.%d" % (i+1)) - self.warnings += 1 - if xisabs(p.source): - warning("stripping absolute path from source name '%s'" % p.source) - p.source = xstrip(p.source) - if xisabs(p.target): - warning("stripping absolute path from target name '%s'" % p.target) - p.target = xstrip(p.target) - - self.items[i].source = p.source - self.items[i].target = p.target - - - def diffstat(self): - """ calculate diffstat and return as a string - Notes: - - original diffstat ouputs target filename - - single + or - shouldn't escape histogram - """ - names = [] - insert = [] - delete = [] - delta = 0 # size change in bytes - namelen = 0 - maxdiff = 0 # max number of changes for single file - # (for histogram width calculation) - for patch in self.items: - i,d = 0,0 - for hunk in patch.hunks: - for line in hunk.text: - if line.startswith(b'+'): - i += 1 - delta += len(line)-1 - elif line.startswith(b'-'): - d += 1 - delta -= len(line)-1 - names.append(patch.target) - insert.append(i) - delete.append(d) - namelen = max(namelen, len(patch.target)) - maxdiff = max(maxdiff, i+d) - output = '' - statlen = len(str(maxdiff)) # stats column width - for i,n in enumerate(names): - # %-19s | %-4d %s - format = " %-" + str(namelen) + "s | %" + str(statlen) + "s %s\n" - - hist = '' - # -- calculating histogram -- - width = len(format % ('', '', '')) - histwidth = max(2, 80 - width) - if maxdiff < histwidth: - hist = "+"*insert[i] + "-"*delete[i] - else: - iratio = (float(insert[i]) / maxdiff) * histwidth - dratio = (float(delete[i]) / maxdiff) * histwidth - - # make sure every entry gets at least one + or - - iwidth = 1 if 0 < iratio < 1 else int(iratio) - dwidth = 1 if 0 < dratio < 1 else int(dratio) - #print(iratio, dratio, iwidth, dwidth, histwidth) - hist = "+"*int(iwidth) + "-"*int(dwidth) - # -- /calculating +- histogram -- - output += (format % (tostr(names[i]), str(insert[i] + delete[i]), hist)) - - output += (" %d files changed, %d insertions(+), %d deletions(-), %+d bytes" - % (len(names), sum(insert), sum(delete), delta)) - return output - - - def findfile(self, old, new): - """ return name of file to be patched or None """ - if exists(old): - return old - elif exists(new): - return new - else: - # [w] Google Code generates broken patches with its online editor - debug("broken patch from Google Code, stripping prefixes..") - if old.startswith(b'a/') and new.startswith(b'b/'): - old, new = old[2:], new[2:] - debug(" %s" % old) - debug(" %s" % new) - if exists(old): - return old - elif exists(new): - return new - return None - - - def apply(self, strip=0, root=None): - """ Apply parsed patch, optionally stripping leading components - from file paths. `root` parameter specifies working dir. 
- return True on success - """ - if root: - prevdir = os.getcwd() - os.chdir(root) - - total = len(self.items) - errors = 0 - if strip: - # [ ] test strip level exceeds nesting level - # [ ] test the same only for selected files - # [ ] test if files end up being on the same level - try: - strip = int(strip) - except ValueError: - errors += 1 - warning("error: strip parameter '%s' must be an integer" % strip) - strip = 0 - - #for fileno, filename in enumerate(self.source): - for i,p in enumerate(self.items): - if strip: - debug("stripping %s leading component(s) from:" % strip) - debug(" %s" % p.source) - debug(" %s" % p.target) - old = pathstrip(p.source, strip) - new = pathstrip(p.target, strip) - else: - old, new = p.source, p.target - - filename = self.findfile(old, new) - - if not filename: - warning("source/target file does not exist:\n --- %s\n +++ %s" % (old, new)) - errors += 1 - continue - if not isfile(filename): - warning("not a file - %s" % filename) - errors += 1 - continue - - # [ ] check absolute paths security here - debug("processing %d/%d:\t %s" % (i+1, total, filename)) - - # validate before patching - f2fp = open(filename, 'rb') - hunkno = 0 - hunk = p.hunks[hunkno] - hunkfind = [] - hunkreplace = [] - validhunks = 0 - canpatch = False - for lineno, line in enumerate(f2fp): - if lineno+1 < hunk.startsrc: - continue - elif lineno+1 == hunk.startsrc: - hunkfind = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" -"] - hunkreplace = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" +"] - #pprint(hunkreplace) - hunklineno = 0 - - # todo \ No newline at end of file - - # check hunks in source file - if lineno+1 < hunk.startsrc+len(hunkfind)-1: - if line.rstrip(b"\r\n") == hunkfind[hunklineno]: - hunklineno+=1 - else: - info("file %d/%d:\t %s" % (i+1, total, filename)) - info(" hunk no.%d doesn't match source file at line %d" % (hunkno+1, lineno+1)) - info(" expected: %s" % hunkfind[hunklineno]) - info(" actual : %s" % line.rstrip(b"\r\n")) - # not counting this as error, because file may already be patched. 
- # check if file is already patched is done after the number of - # invalid hunks if found - # TODO: check hunks against source/target file in one pass - # API - check(stream, srchunks, tgthunks) - # return tuple (srcerrs, tgterrs) - - # continue to check other hunks for completeness - hunkno += 1 - if hunkno < len(p.hunks): - hunk = p.hunks[hunkno] - continue - else: - break - - # check if processed line is the last line - if lineno+1 == hunk.startsrc+len(hunkfind)-1: - debug(" hunk no.%d for file %s -- is ready to be patched" % (hunkno+1, filename)) - hunkno+=1 - validhunks+=1 - if hunkno < len(p.hunks): - hunk = p.hunks[hunkno] - else: - if validhunks == len(p.hunks): - # patch file - canpatch = True - break - else: - if hunkno < len(p.hunks): - warning("premature end of source file %s at hunk %d" % (filename, hunkno+1)) - errors += 1 - - f2fp.close() - - if validhunks < len(p.hunks): - if self._match_file_hunks(filename, p.hunks): - warning("already patched %s" % filename) - else: - warning("source file is different - %s" % filename) - errors += 1 - if canpatch: - backupname = filename+b".orig" - if exists(backupname): - warning("can't backup original file to %s - aborting" % backupname) - else: - import shutil - shutil.move(filename, backupname) - if self.write_hunks(backupname, filename, p.hunks): - info("successfully patched %d/%d:\t %s" % (i+1, total, filename)) - os.unlink(backupname) - else: - errors += 1 - warning("error patching file %s" % filename) - shutil.copy(filename, filename+".invalid") - warning("invalid version is saved to %s" % filename+".invalid") - # todo: proper rejects - shutil.move(backupname, filename) - - if root: - os.chdir(prevdir) - - # todo: check for premature eof - return (errors == 0) - - - def _reverse(self): - """ reverse patch direction (this doesn't touch filenames) """ - for p in self.items: - for h in p.hunks: - h.startsrc, h.starttgt = h.starttgt, h.startsrc - h.linessrc, h.linestgt = h.linestgt, h.linessrc - for i,line in enumerate(h.text): - # need to use line[0:1] here, because line[0] - # returns int instead of bytes on Python 3 - if line[0:1] == b'+': - h.text[i] = b'-' + line[1:] - elif line[0:1] == b'-': - h.text[i] = b'+' +line[1:] - - def revert(self, strip=0, root=None): - """ apply patch in reverse order """ - reverted = copy.deepcopy(self) - reverted._reverse() - return reverted.apply(strip, root) - - - def can_patch(self, filename): - """ Check if specified filename can be patched. Returns None if file can - not be found among source filenames. False if patch can not be applied - clearly. True otherwise. 
- - :returns: True, False or None - """ - filename = abspath(filename) - for p in self.items: - if filename == abspath(p.source): - return self._match_file_hunks(filename, p.hunks) - return None - - - def _match_file_hunks(self, filepath, hunks): - matched = True - fp = open(abspath(filepath), 'rb') - - class NoMatch(Exception): - pass - - lineno = 1 - line = fp.readline() - hno = None - try: - for hno, h in enumerate(hunks): - # skip to first line of the hunk - while lineno < h.starttgt: - if not len(line): # eof - debug("check failed - premature eof before hunk: %d" % (hno+1)) - raise NoMatch - line = fp.readline() - lineno += 1 - for hline in h.text: - if hline.startswith(b"-"): - continue - if not len(line): - debug("check failed - premature eof on hunk: %d" % (hno+1)) - # todo: \ No newline at the end of file - raise NoMatch - if line.rstrip(b"\r\n") != hline[1:].rstrip(b"\r\n"): - debug("file is not patched - failed hunk: %d" % (hno+1)) - raise NoMatch - line = fp.readline() - lineno += 1 - - except NoMatch: - matched = False - # todo: display failed hunk, i.e. expected/found - - fp.close() - return matched - - - def patch_stream(self, instream, hunks): - """ Generator that yields stream patched with hunks iterable - - Converts lineends in hunk lines to the best suitable format - autodetected from input - """ - - # todo: At the moment substituted lineends may not be the same - # at the start and at the end of patching. Also issue a - # warning/throw about mixed lineends (is it really needed?) - - hunks = iter(hunks) - - srclineno = 1 - - lineends = {b'\n':0, b'\r\n':0, b'\r':0} - def get_line(): - """ - local utility function - return line from source stream - collecting line end statistics on the way - """ - line = instream.readline() - # 'U' mode works only with text files - if line.endswith(b"\r\n"): - lineends[b"\r\n"] += 1 - elif line.endswith(b"\n"): - lineends[b"\n"] += 1 - elif line.endswith(b"\r"): - lineends[b"\r"] += 1 - return line - - for hno, h in enumerate(hunks): - debug("hunk %d" % (hno+1)) - # skip to line just before hunk starts - while srclineno < h.startsrc: - yield get_line() - srclineno += 1 - - for hline in h.text: - # todo: check \ No newline at the end of file - if hline.startswith(b"-") or hline.startswith(b"\\"): - get_line() - srclineno += 1 - continue - else: - if not hline.startswith(b"+"): - get_line() - srclineno += 1 - line2write = hline[1:] - # detect if line ends are consistent in source file - if sum([bool(lineends[x]) for x in lineends]) == 1: - newline = [x for x in lineends if lineends[x] != 0][0] - yield line2write.rstrip(b"\r\n")+newline - else: # newlines are mixed - yield line2write - - for line in instream: - yield line - - - def write_hunks(self, srcname, tgtname, hunks): - src = open(srcname, "rb") - tgt = open(tgtname, "wb") - - debug("processing target file %s" % tgtname) - - tgt.writelines(self.patch_stream(src, hunks)) - - tgt.close() - src.close() - # [ ] TODO: add test for permission copy - shutil.copymode(srcname, tgtname) - return True - - - def dump(self): - for p in self.items: - for headline in p.header: - print(headline.rstrip('\n')) - print('--- ' + p.source) - print('+++ ' + p.target) - for h in p.hunks: - print('@@ -%s,%s +%s,%s @@' % (h.startsrc, h.linessrc, h.starttgt, h.linestgt)) - for line in h.text: - print(line.rstrip('\n')) - - -def main(): - from optparse import OptionParser - from os.path import exists - import sys - - opt = OptionParser(usage="1. %prog [options] unified.diff\n" - " 2. 
%prog [options] http://host/patch\n" - " 3. %prog [options] -- < unified.diff", - version="python-patch %s" % __version__) - opt.add_option("-q", "--quiet", action="store_const", dest="verbosity", - const=0, help="print only warnings and errors", default=1) - opt.add_option("-v", "--verbose", action="store_const", dest="verbosity", - const=2, help="be verbose") - opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode") - opt.add_option("--diffstat", action="store_true", dest="diffstat", - help="print diffstat and exit") - opt.add_option("-d", "--directory", metavar='DIR', - help="specify root directory for applying patch") - opt.add_option("-p", "--strip", type="int", metavar='N', default=0, - help="strip N path components from filenames") - opt.add_option("--revert", action="store_true", - help="apply patch in reverse order (unpatch)") - (options, args) = opt.parse_args() - - if not args and sys.argv[-1:] != ['--']: - opt.print_version() - opt.print_help() - sys.exit() - readstdin = (sys.argv[-1:] == ['--'] and not args) - - verbosity_levels = {0:logging.WARNING, 1:logging.INFO, 2:logging.DEBUG} - loglevel = verbosity_levels[options.verbosity] - logformat = "%(message)s" - logger.setLevel(loglevel) - streamhandler.setFormatter(logging.Formatter(logformat)) - - if options.debugmode: - setdebug() # this sets global debugmode variable - - if readstdin: - patch = PatchSet(sys.stdin) - else: - patchfile = args[0] - urltest = patchfile.split(':')[0] - if (':' in patchfile and urltest.isalpha() - and len(urltest) > 1): # one char before : is a windows drive letter - patch = fromurl(patchfile) - else: - if not exists(patchfile) or not isfile(patchfile): - sys.exit("patch file does not exist - %s" % patchfile) - patch = fromfile(patchfile) - - if options.diffstat: - print(patch.diffstat()) - sys.exit(0) - - #pprint(patch) - if options.revert: - patch.revert(options.strip, root=options.directory) or sys.exit(-1) - else: - patch.apply(options.strip, root=options.directory) or sys.exit(-1) - - # todo: document and test line ends handling logic - patch.py detects proper line-endings - # for inserted hunks and issues a warning if patched file has incosistent line ends - - -if __name__ == "__main__": - main() - -# Legend: -# [ ] - some thing to be done -# [w] - official wart, external or internal that is unlikely to be fixed - -# [ ] API break (2.x) wishlist -# PatchSet.items --> PatchSet.patches - -# [ ] run --revert test for all dataset items -# [ ] run .parse() / .dump() test for dataset diff --git a/tests/01uni_multi/01uni_multi.patch b/tests/01uni_multi/01uni_multi.patch deleted file mode 100644 index ba5939c..0000000 --- a/tests/01uni_multi/01uni_multi.patch +++ /dev/null @@ -1,180 +0,0 @@ -Index: updatedlg.cpp -=================================================================== ---- updatedlg.cpp (revision 5095) -+++ updatedlg.cpp (working copy) -@@ -94,11 +94,13 @@ - lst->InsertColumn(1, _("Version")); - lst->InsertColumn(2, _("Installed")); - lst->InsertColumn(3, _("Size"), wxLIST_FORMAT_RIGHT); -+ lst->InsertColumn(4, _("Rev")); - -- lst->SetColumnWidth(0, lst->GetSize().x - (64 * 3) - 2); // 1st column takes all remaining space -+ lst->SetColumnWidth(0, lst->GetSize().x - (64 * 3 + 40) - 6 ); // 1st column takes all remaining space - lst->SetColumnWidth(1, 64); - lst->SetColumnWidth(2, 64); - lst->SetColumnWidth(3, 64); -+ lst->SetColumnWidth(4, 40); - } - - void UpdateDlg::AddRecordToList(UpdateRec* rec) -@@ -111,8 +113,20 @@ - lst->SetItem(idx, 1, rec->version); - 
lst->SetItem(idx, 2, rec->installed_version); - lst->SetItem(idx, 3, rec->size); -+ lst->SetItem(idx, 4, rec->revision); - } - -+wxString UpdateDlg::GetListColumnText(int idx, int col) { -+ wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); -+ int index = idx == -1 ? lst->GetNextItem(-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED) : idx; -+ wxListItem info; -+ info.SetId(index); -+ info.SetColumn(col); -+ info.SetMask(wxLIST_MASK_TEXT); -+ lst->GetItem(info); -+ return info.GetText(); -+} -+ - void UpdateDlg::SetListColumnText(int idx, int col, const wxString& text) - { - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); -@@ -393,7 +407,9 @@ - if (index == -1) - return 0; - wxString title = lst->GetItemText(index); -- return FindRecByTitle(title, m_Recs, m_RecsCount); -+ wxString version = GetListColumnText(index, 1); -+ wxString revision = GetListColumnText(index, 4); -+ return FindRec(title, version, revision, m_Recs, m_RecsCount); - } - - void UpdateDlg::DownloadFile(bool dontInstall) -Index: updatedlg.h -=================================================================== ---- updatedlg.h (revision 5095) -+++ updatedlg.h (working copy) -@@ -49,6 +49,7 @@ - UpdateRec* GetRecFromListView(); - void CreateListColumns(); - void AddRecordToList(UpdateRec* rec); -+ wxString GetListColumnText(int idx, int col); - void SetListColumnText(int idx, int col, const wxString& text); - - wxString GetConfFilename(); -Index: manifest.xml -=================================================================== ---- manifest.xml (revision 5095) -+++ manifest.xml (working copy) -@@ -2,18 +2,19 @@ - - - -- -- -+ -+ - - - - - -+ Julian R Seward for libbzip2. -+ -+ libbzip2 copyright notice: -+ bzip2 and associated library libbzip2, are -+ copyright (C) 1996-2000 Julian R Seward. -+ All rights reserved." 
/> - - - -Index: conf.cpp -=================================================================== ---- conf.cpp (revision 5095) -+++ conf.cpp (working copy) -@@ -46,10 +46,16 @@ - // fix title - // devpaks.org has changed the title to contain some extra info - // e.g.: [libunicows Library version: 1.1.1 Devpak revision: 1sid] -- // we don't need this extra info, so if we find it we remove it -- int pos = rec.title.Find(_T("Library version:")); -+ int pos = rec.title.Lower().Find(_T("library version:")); - if (pos != -1) - { -+ int revpos = rec.title.Lower().Find(_T("devpak revision:")); -+ if (revpos != -1) { -+ rec.revision = rec.title.Mid(revpos).AfterFirst(_T(':')).Trim(false); -+ rec.revision.Replace(_T("\t"), _T(" ")); -+ rec.revision = rec.revision.BeforeFirst(_T(' ')); -+ } -+ - rec.title.Truncate(pos); - rec.title = rec.title.Trim(false); - rec.title = rec.title.Trim(true); -@@ -60,7 +66,7 @@ - rec.remote_file = ini.GetKeyValue(i, _T("RemoteFilename")); - rec.local_file = ini.GetKeyValue(i, _T("LocalFilename")); - rec.groups = GetArrayFromString(ini.GetKeyValue(i, _T("Group")), _T(",")); -- rec.install = ini.GetKeyValue(i, _T("InstallPath")); -+ rec.install_path = ini.GetKeyValue(i, _T("InstallPath")); - rec.version = ini.GetKeyValue(i, _T("Version")); - ini.GetKeyValue(i, _T("Size")).ToLong(&rec.bytes); - rec.date = ini.GetKeyValue(i, _T("Date")); -@@ -99,12 +105,17 @@ - return list; - } - --UpdateRec* FindRecByTitle(const wxString& title, UpdateRec* list, int count) -+UpdateRec* FindRec(const wxString& title, const wxString& version, const wxString& revision, UpdateRec* list, int count) - { - for (int i = 0; i < count; ++i) - { -- if (list[i].title == title) -- return &list[i]; -+ if (list[i].title == title && list[i].version == version) { -+ if (revision.IsEmpty()) { -+ return &list[i]; -+ } else if (list[i].revision == revision) { -+ return &list[i]; -+ } -+ } - } - return 0; - } -Index: conf.h -=================================================================== ---- conf.h (revision 5095) -+++ conf.h (working copy) -@@ -7,7 +7,7 @@ - - struct UpdateRec - { -- wxString entry; -+ wxString entry; //! .entry filename for installed - wxString title; - wxString name; - wxString desc; -@@ -15,8 +15,9 @@ - wxString remote_file; - wxString local_file; - wxArrayString groups; -- wxString install; -+ wxString install_path; //! 
ignored - wxString version; -+ wxString revision; - wxString installed_version; - long int bytes; - float kilobytes; -@@ -31,7 +32,7 @@ - extern wxString g_MasterPath; - - UpdateRec* ReadConf(const IniParser& ini, int* recCount, const wxString& currentServer, const wxString& appPath); --UpdateRec* FindRecByTitle(const wxString& title, UpdateRec* list, int count); -+UpdateRec* FindRec(const wxString& title, const wxString& version, const wxString& revision, UpdateRec* list, int count); - // utility - wxString GetSizeString(int bytes); - diff --git a/tests/01uni_multi/[result]/conf.cpp b/tests/01uni_multi/[result]/conf.cpp deleted file mode 100644 index bdd5349..0000000 --- a/tests/01uni_multi/[result]/conf.cpp +++ /dev/null @@ -1,121 +0,0 @@ -/* - * This file is part of the Code::Blocks IDE and licensed under the GNU General Public License, version 3 - * http://www.gnu.org/licenses/gpl-3.0.html - * - * $Revision: 4909 $ - * $Id: conf.cpp 4909 2008-02-27 13:15:26Z mortenmacfly $ - * $HeadURL: http://svn.berlios.de/svnroot/repos/codeblocks/tags/8.02/src/plugins/contrib/devpak_plugin/conf.cpp $ - */ - -#include "conf.h" -#include -#include -#include -#include - -wxString g_MasterPath; - -wxString GetSizeString(int bytes) -{ - wxString ret; - float kilobytes = (float)bytes / 1024.0f; - float megabytes = kilobytes / 1024.0f; - if (megabytes >= 1.0f) - ret.Printf(_("%.2f MB"), megabytes); - else if (kilobytes >= 1.0f) - ret.Printf(_("%.2f KB"), kilobytes); - else - ret.Printf(_("%ld bytes"), bytes); - return ret; -} - -UpdateRec* ReadConf(const IniParser& ini, int* recCount, const wxString& currentServer, const wxString& appPath) -{ - *recCount = 0; - int groupsCount = ini.GetGroupsCount(); - if (groupsCount == 0) - return 0; - - UpdateRec* list = new UpdateRec[ini.GetGroupsCount()]; - for (int i = 0; i < groupsCount; ++i) - { - UpdateRec& rec = list[i]; - - rec.title = ini.GetGroupName(i); - - // fix title - // devpaks.org has changed the title to contain some extra info - // e.g.: [libunicows Library version: 1.1.1 Devpak revision: 1sid] - int pos = rec.title.Lower().Find(_T("library version:")); - if (pos != -1) - { - int revpos = rec.title.Lower().Find(_T("devpak revision:")); - if (revpos != -1) { - rec.revision = rec.title.Mid(revpos).AfterFirst(_T(':')).Trim(false); - rec.revision.Replace(_T("\t"), _T(" ")); - rec.revision = rec.revision.BeforeFirst(_T(' ')); - } - - rec.title.Truncate(pos); - rec.title = rec.title.Trim(false); - rec.title = rec.title.Trim(true); - } - - rec.name = ini.GetKeyValue(i, _T("Name")); - rec.desc = ini.GetKeyValue(i, _T("Description")); - rec.remote_file = ini.GetKeyValue(i, _T("RemoteFilename")); - rec.local_file = ini.GetKeyValue(i, _T("LocalFilename")); - rec.groups = GetArrayFromString(ini.GetKeyValue(i, _T("Group")), _T(",")); - rec.install_path = ini.GetKeyValue(i, _T("InstallPath")); - rec.version = ini.GetKeyValue(i, _T("Version")); - ini.GetKeyValue(i, _T("Size")).ToLong(&rec.bytes); - rec.date = ini.GetKeyValue(i, _T("Date")); - rec.installable = ini.GetKeyValue(i, _T("Execute")) == _T("1"); - - // read .entry file (if exists) - rec.entry = (!rec.name.IsEmpty() ? 
rec.name : wxFileName(rec.local_file).GetName()) + _T(".entry"); - IniParser p; - p.ParseFile(appPath + rec.entry); - rec.installed_version = p.GetValue(_T("Setup"), _T("AppVersion")); - - rec.downloaded = wxFileExists(appPath + _T("/") + rec.local_file); - rec.installed = !rec.installed_version.IsEmpty(); - - // calculate size - rec.size = GetSizeString(rec.bytes); - - // fix-up - if (rec.name.IsEmpty()) - rec.name = rec.title; - rec.desc.Replace(_T(""), _T("\n")); - rec.desc.Replace(_T(""), _T("\r")); - wxURL url(rec.remote_file); - if (!url.GetServer().IsEmpty()) - { - rec.remote_server = url.GetScheme() + _T("://") + url.GetServer(); - int pos = rec.remote_file.Find(url.GetServer()); - if (pos != wxNOT_FOUND) - rec.remote_file.Remove(0, pos + url.GetServer().Length() + 1); - } - else - rec.remote_server = currentServer; - } - - *recCount = groupsCount; - return list; -} - -UpdateRec* FindRec(const wxString& title, const wxString& version, const wxString& revision, UpdateRec* list, int count) -{ - for (int i = 0; i < count; ++i) - { - if (list[i].title == title && list[i].version == version) { - if (revision.IsEmpty()) { - return &list[i]; - } else if (list[i].revision == revision) { - return &list[i]; - } - } - } - return 0; -} diff --git a/tests/01uni_multi/[result]/conf.h b/tests/01uni_multi/[result]/conf.h deleted file mode 100644 index 6788ba5..0000000 --- a/tests/01uni_multi/[result]/conf.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef CONF_H -#define CONF_H - -#include -#include -#include "cbiniparser.h" - -struct UpdateRec -{ - wxString entry; //! .entry filename for installed - wxString title; - wxString name; - wxString desc; - wxString remote_server; - wxString remote_file; - wxString local_file; - wxArrayString groups; - wxString install_path; //! 
ignored - wxString version; - wxString revision; - wxString installed_version; - long int bytes; - float kilobytes; - float megabytes; - wxString size; - wxString date; - bool installable; - bool downloaded; - bool installed; -}; - -extern wxString g_MasterPath; - -UpdateRec* ReadConf(const IniParser& ini, int* recCount, const wxString& currentServer, const wxString& appPath); -UpdateRec* FindRec(const wxString& title, const wxString& version, const wxString& revision, UpdateRec* list, int count); -// utility -wxString GetSizeString(int bytes); - -#endif // CONF_H diff --git a/tests/01uni_multi/[result]/manifest.xml b/tests/01uni_multi/[result]/manifest.xml deleted file mode 100644 index 578085c..0000000 --- a/tests/01uni_multi/[result]/manifest.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - - - diff --git a/tests/01uni_multi/[result]/updatedlg.cpp b/tests/01uni_multi/[result]/updatedlg.cpp deleted file mode 100644 index 99e01e6..0000000 --- a/tests/01uni_multi/[result]/updatedlg.cpp +++ /dev/null @@ -1,742 +0,0 @@ -/* - * This file is part of the Code::Blocks IDE and licensed under the GNU General Public License, version 3 - * http://www.gnu.org/licenses/gpl-3.0.html - * - * $Revision: 4909 $ - * $Id: updatedlg.cpp 4909 2008-02-27 13:15:26Z mortenmacfly $ - * $HeadURL: http://svn.berlios.de/svnroot/repos/codeblocks/tags/8.02/src/plugins/contrib/devpak_plugin/updatedlg.cpp $ - */ - -#include "updatedlg.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "devpakinstaller.h" -#include "crc32.h" - -#include "manager.h" -#include "configmanager.h" -#include "globals.h" - -int idNet = wxNewId(); -int idPopupInstall = wxNewId(); -int idPopupDownload = wxNewId(); -int idPopupDownloadAndInstall = wxNewId(); -int idPopupUninstall = wxNewId(); - -BEGIN_EVENT_TABLE(UpdateDlg, wxDialog) - EVT_UPDATE_UI(-1, UpdateDlg::OnUpdateUI) - EVT_TREE_SEL_CHANGED(XRCID("tvCategories"), UpdateDlg::OnTreeSelChanged) - EVT_LIST_ITEM_SELECTED(XRCID("lvFiles"), UpdateDlg::OnFileSelected) - EVT_LIST_ITEM_DESELECTED(XRCID("lvFiles"), UpdateDlg::OnFileDeSelected) - EVT_LIST_ITEM_RIGHT_CLICK(XRCID("lvFiles"), UpdateDlg::OnFileRightClick) - EVT_MENU(idPopupDownload, UpdateDlg::OnDownload) - EVT_MENU(idPopupDownloadAndInstall, UpdateDlg::OnDownloadAndInstall) - EVT_MENU(idPopupInstall, UpdateDlg::OnInstall) - EVT_MENU(idPopupUninstall, UpdateDlg::OnUninstall) - EVT_COMBOBOX(XRCID("cmbServer"), UpdateDlg::OnServerChange) - EVT_COMBOBOX(XRCID("cmbFilter"), UpdateDlg::OnFilterChange) - EVT_CHECKBOX(XRCID("chkCache"), UpdateDlg::OnServerChange) - EVT_CBNET_CONNECT(idNet, UpdateDlg::OnConnect) - EVT_CBNET_DISCONNECT(idNet, UpdateDlg::OnDisConnect) - EVT_CBNET_PROGRESS(idNet, UpdateDlg::OnProgress) - EVT_CBNET_ABORTED(idNet, UpdateDlg::OnAborted) - EVT_CBNET_START_DOWNLOAD(idNet, UpdateDlg::OnDownloadStarted) - EVT_CBNET_END_DOWNLOAD(idNet, UpdateDlg::OnDownloadEnded) -END_EVENT_TABLE() - -UpdateDlg::UpdateDlg(wxWindow* parent) - : m_Recs(0), - m_RecsCount(0), - m_CurrFileSize(0), - m_LastBlockSize(0), - m_HasUpdated(false), - m_FirstTimeCheck(true), - m_Net(this, idNet, _T("http://devpaks.sourceforge.net/")) -{ - //ctor - wxXmlResource::Get()->LoadDialog(this, parent, _T("MainFrame")); - CreateListColumns(); - FillServers(); - UpdateStatus(_("Ready"), 0); -} - -UpdateDlg::~UpdateDlg() -{ - //dtor - delete[] m_Recs; - m_RecsCount = 0; -} - -void UpdateDlg::EndModal(int retCode) -{ - if (!m_Net.IsConnected() || retCode != wxID_CANCEL) - { - 
wxDialog::EndModal(retCode); - return; - } - - if (m_Net.IsConnected()) - m_Net.Abort(); -} - -void UpdateDlg::CreateListColumns() -{ - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - lst->InsertColumn(0, _("Title")); - lst->InsertColumn(1, _("Version")); - lst->InsertColumn(2, _("Installed")); - lst->InsertColumn(3, _("Size"), wxLIST_FORMAT_RIGHT); - lst->InsertColumn(4, _("Rev")); - - lst->SetColumnWidth(0, lst->GetSize().x - (64 * 3 + 40) - 6 ); // 1st column takes all remaining space - lst->SetColumnWidth(1, 64); - lst->SetColumnWidth(2, 64); - lst->SetColumnWidth(3, 64); - lst->SetColumnWidth(4, 40); -} - -void UpdateDlg::AddRecordToList(UpdateRec* rec) -{ - if (!rec) - return; - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - int idx = lst->GetItemCount(); - lst->InsertItem(idx, rec->title); - lst->SetItem(idx, 1, rec->version); - lst->SetItem(idx, 2, rec->installed_version); - lst->SetItem(idx, 3, rec->size); - lst->SetItem(idx, 4, rec->revision); -} - -wxString UpdateDlg::GetListColumnText(int idx, int col) { - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - int index = idx == -1 ? lst->GetNextItem(-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED) : idx; - wxListItem info; - info.SetId(index); - info.SetColumn(col); - info.SetMask(wxLIST_MASK_TEXT); - lst->GetItem(info); - return info.GetText(); -} - -void UpdateDlg::SetListColumnText(int idx, int col, const wxString& text) -{ - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - int index = idx == -1 ? lst->GetNextItem(-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED) : idx; - wxListItem it; - it.m_itemId = index; - it.m_col = col; - it.m_mask = wxLIST_MASK_TEXT; - it.m_text = text; - lst->SetItem(it); -} - -void UpdateDlg::UpdateStatus(const wxString& status, int curProgress, int maxProgress) -{ - wxStaticText* lbl = XRCCTRL(*this, "lblStatus", wxStaticText); - if (lbl->GetLabel() != status) - lbl->SetLabel(status); - if (curProgress != -1) - XRCCTRL(*this, "gauProgress", wxGauge)->SetValue(curProgress); - if (maxProgress != -1) - XRCCTRL(*this, "gauProgress", wxGauge)->SetRange(maxProgress); -} - -void UpdateDlg::EnableButtons(bool update, bool abort) -{ - wxButton* btnCl = XRCCTRL(*this, "wxID_CANCEL", wxButton); - - btnCl->Enable(abort); - // disable server list and cache checkbox while downloading - XRCCTRL(*this, "cmbServer", wxComboBox)->Enable(!m_Net.IsConnected()); - XRCCTRL(*this, "chkCache", wxCheckBox)->Enable(!m_Net.IsConnected()); - - wxYield(); -} - -void UpdateDlg::FillGroups() -{ - UpdateStatus(_("Parsing list of updates"), 0, m_RecsCount - 1); - - // get a list of unique group names - wxArrayString groups; - for (int i = 0; i < m_RecsCount; ++i) - { - for (unsigned int x = 0; x < m_Recs[i].groups.GetCount(); ++x) - { - if (m_Recs[i].groups[x].IsEmpty()) - continue; - if (groups.Index(m_Recs[i].groups[x]) == wxNOT_FOUND) - { - if (FilterRec(&m_Recs[i])) - groups.Add(m_Recs[i].groups[x]); - } - } - } - - // create the groups tree - wxTreeCtrl* tree = XRCCTRL(*this, "tvCategories", wxTreeCtrl); - tree->Freeze(); - tree->DeleteAllItems(); - wxTreeItemId root = tree->AddRoot(_("All categories")); - for (unsigned int i = 0; i < groups.GetCount(); ++i) - { - tree->AppendItem(root, groups[i]); - } - tree->SortChildren(root); - tree->Thaw(); - tree->Expand(root); - tree->SelectItem(root); // this calls the event - - UpdateStatus(_("Done parsing list of updates"), 0); -} - -void UpdateDlg::FillFiles(const wxTreeItemId& id) -{ - wxTreeCtrl* tree = XRCCTRL(*this, "tvCategories", wxTreeCtrl); - 
wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - lst->Freeze(); - lst->ClearAll(); - CreateListColumns(); - - wxString group = id == tree->GetRootItem() ? _T("") : tree->GetItemText(id); - - // add files belonging to group - int counter = 0; - for (int i = 0; i < m_RecsCount; ++i) - { - if (group.IsEmpty() || (!m_Recs[i].groups.IsEmpty() && m_Recs[i].groups.Index(group) != wxNOT_FOUND)) - { - // filter - if (FilterRec(&m_Recs[i])) - { - AddRecordToList(&m_Recs[i]); - ++counter; - } - } - } - lst->Thaw(); - - // select first item - lst->SetItemState(0, wxLIST_STATE_SELECTED | wxLIST_STATE_FOCUSED, wxLIST_STATE_SELECTED | wxLIST_STATE_FOCUSED); -} - -void UpdateDlg::FillFileDetails(const wxListItem& id) -{ - wxTextCtrl* txt = XRCCTRL(*this, "txtInfo", wxTextCtrl); - txt->Clear(); - - UpdateRec* cur = GetRecFromListView(); - if (!cur) - { - txt->Clear(); - EnableButtons(); - return; - } - txt->AppendText(_("Name: ") + cur->name + _T("\n")); -// txt->AppendText(_("Server: ") + cur->remote_server + _T("\n")); -// txt->AppendText(_("File: ") + cur->remote_file + _T("\n")); - txt->AppendText(_("Version: ") + cur->version + _T("\n")); - txt->AppendText(_("Size: ") + cur->size + _T("\n")); - txt->AppendText(_("Date: ") + cur->date + _T("\n\n")); - txt->AppendText(_("Description: \n")); - txt->AppendText(cur->desc); - - txt->SetSelection(0, 0); - txt->SetInsertionPoint(0); -} - -void UpdateDlg::InternetUpdate(bool forceDownload) -{ - UpdateStatus(_("Please wait...")); - m_HasUpdated = false; - m_Net.SetServer(GetCurrentServer()); - - EnableButtons(false); - forceDownload = forceDownload || !XRCCTRL(*this, "chkCache", wxCheckBox)->GetValue(); - - bool forceDownloadMirrors = forceDownload || !wxFileExists(GetMirrorsFilename()); - if (forceDownloadMirrors) - { - if (!m_Net.DownloadFile(_T("mirrors.cfg"), GetMirrorsFilename())) - { - UpdateStatus(_("Error downloading list of mirrors"), 0, 0); - return; - } - else - { - FillServers(); - m_Net.SetServer(GetCurrentServer()); // update server based on mirrors - } - } - - wxString config = GetConfFilename(); - forceDownload = forceDownload || !wxFileExists(config); - if (forceDownload && !m_Net.DownloadFile(_T("webupdate.conf"), config)) - { - UpdateStatus(_("Error downloading list of updates"), 0, 0); - return; - } - else - { - IniParser ini; - if (!ini.ParseFile(config)) - { - UpdateStatus(_("Failed to retrieve the list of updates"), 0, 0); - return; - } - ini.Sort(); - - if (m_Recs) - delete[] m_Recs; - - // remember to delete[] m_Recs when we 're done with it!!! 
- // it's our responsibility once given to us - m_Recs = ReadConf(ini, &m_RecsCount, GetCurrentServer(), GetPackagePath()); - - FillGroups(); - } - EnableButtons(); - UpdateStatus(_("Ready"), 0, 0); - - m_HasUpdated = true; -} - -void UpdateDlg::FillServers() -{ - wxComboBox* cmb = XRCCTRL(*this, "cmbServer", wxComboBox); - cmb->Clear(); - m_Servers.Clear(); - - IniParser ini; - ini.ParseFile(GetMirrorsFilename()); - int group = ini.FindGroupByName(_T("WebUpdate mirrors")); - for (int i = 0; group != -1 && i < ini.GetKeysCount(group); ++i) - { - cmb->Append(ini.GetKeyName(group, i)); - m_Servers.Add(ini.GetKeyValue(group, i)); - } - if (cmb->GetCount() == 0) - { - cmb->Append(_("devpaks.org Community Devpaks")); - m_Servers.Add(_T("http://devpaks.sourceforge.net/")); - } - cmb->SetSelection(0); -} - -wxString UpdateDlg::GetConfFilename() -{ - int server_hash = GetTextCRC32(GetCurrentServer().mb_str()); - wxString config; - config = ConfigManager::GetConfigFolder() + wxFILE_SEP_PATH; - config.Printf(_T("%sdevpak_%x.conf"), config.c_str(), server_hash); - return config; -} - -wxString UpdateDlg::GetMirrorsFilename() const -{ - wxString config; - config = ConfigManager::GetConfigFolder() + wxFILE_SEP_PATH + _T("devpak_mirrors.cfg"); - return config; -} - -wxString UpdateDlg::GetCurrentServer() const -{ - return m_Servers[XRCCTRL(*this, "cmbServer", wxComboBox)->GetSelection()]; -} - -wxString UpdateDlg::GetBasePath() const -{ - return g_MasterPath + wxFILE_SEP_PATH; -} - -wxString UpdateDlg::GetPackagePath() const -{ - return GetBasePath() + _T("Packages") + wxFILE_SEP_PATH; -} - -bool UpdateDlg::FilterRec(UpdateRec* rec) -{ - if (!rec) - return false; - wxComboBox* cmb = XRCCTRL(*this, "cmbFilter", wxComboBox); - switch (cmb->GetSelection()) - { - case 0: // All - return true; - - case 1: // Installed - return rec->installed; - - case 2: // installed with update available - return rec->installed && rec->version != rec->installed_version; - - case 3: // downloaded but not installed - return rec->downloaded && !rec->installed; - - case 4: // not installed - return !rec->downloaded && !rec->installed; - - default: - return false; - } - return false; // doesn't reach here -} - -void UpdateDlg::ApplyFilter() -{ - wxTreeCtrl* tree = XRCCTRL(*this, "tvCategories", wxTreeCtrl); - - FillGroups(); - FillFiles(tree->GetSelection()); - EnableButtons(); -} - -UpdateRec* UpdateDlg::GetRecFromListView() -{ - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - int index = lst->GetNextItem(-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED); - if (index == -1) - return 0; - wxString title = lst->GetItemText(index); - wxString version = GetListColumnText(index, 1); - wxString revision = GetListColumnText(index, 4); - return FindRec(title, version, revision, m_Recs, m_RecsCount); -} - -void UpdateDlg::DownloadFile(bool dontInstall) -{ - UpdateStatus(_("Please wait...")); - UpdateRec* rec = GetRecFromListView(); - if (!rec) - { - wxMessageBox(_("No file selected!"), _("Error"), wxICON_ERROR); - UpdateStatus(_("Ready"), 0, 0); - return; - } - - if (rec->version == rec->installed_version) - { - if (wxMessageBox(_("You seem to have installed the latest version.\nAre you sure you want to proceed?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxNO) - return; - } - - if (!CreateDirRecursively(GetPackagePath())) - { - wxMessageBox(_("Can't create directory ") + GetPackagePath(), _("Error"), wxICON_ERROR); - return; - } - - if (wxFileExists(GetPackagePath() + rec->local_file)) - { - if (wxMessageBox(_("This file 
already exists!\nAre you sure you want to download it again?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxNO && - rec->installable) - { - if (!dontInstall && wxMessageBox(_("Do you want to force-install it?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxYES) - InstallFile(); - return; - } - } - - m_Net.SetServer(rec->remote_server); - - EnableButtons(false); - if (!m_Net.DownloadFile(rec->remote_file, GetPackagePath() + rec->local_file)) - { - rec->downloaded = false; - UpdateStatus(_("Error downloading file: ") + rec->remote_server + _T(" > ") + rec->remote_file, 0, 0); - return; - } - else - rec->downloaded = true; - UpdateStatus(_("Ready"), 0, 0); - EnableButtons(); -} - -void UpdateDlg::InstallFile() -{ - UpdateStatus(_("Please wait...")); - UpdateRec* rec = GetRecFromListView(); - if (!rec) - { - wxMessageBox(_("No file selected!"), _("Error"), wxICON_ERROR); - UpdateStatus(_("Ready"), 0, 0); - return; - } - wxYield(); - - if (rec->title == _T("WebUpdate Mirrors list")) - { - InstallMirrors(GetPackagePath() + rec->local_file); - rec->installed = true; - ApplyFilter(); - UpdateStatus(_("Ready"), 0, 0); - return; - } - else if (!rec->installable) - { - UpdateStatus(_("Ready"), 0, 0); - return; - } - - if (!CreateDirRecursively(GetPackagePath())) - { - UpdateStatus(_("Ready"), 0, 0); - wxMessageBox(_("Can't create directory ") + GetPackagePath(), _("Error"), wxICON_ERROR); - return; - } - - wxArrayString files; - DevPakInstaller inst; - if (inst.Install(rec->name, GetPackagePath() + rec->local_file, GetBasePath(), &files)) - { -// wxFileName fname(GetPackagePath() + rec->local_file); -// fname.SetExt("entry"); -// fname.SetName(rec->title); -// CreateEntryFile(rec, fname.GetFullPath(), files); - CreateEntryFile(rec, GetPackagePath() + rec->entry, files); - wxMessageBox(_("DevPak installed"), _("Message"), wxICON_INFORMATION); - - // refresh installed_version - rec->installed = true; - rec->installed_version = rec->version; - SetListColumnText(-1, 2, rec->installed_version); - } - else - { - wxMessageBox(_("DevPak was not installed.\nStatus:\n") + inst.GetStatus(), _("Error"), wxICON_ERROR); - } - UpdateStatus(_("Ready"), 0, 0); -} - -void UpdateDlg::InstallMirrors(const wxString& file) -{ - if (!wxCopyFile(file, GetMirrorsFilename(), true)) - wxMessageBox(_("Can't install mirrors file: ") + file, _("Error"), wxICON_ERROR); - else - { - wxRemoveFile(file); - FillServers(); - m_Net.SetServer(GetCurrentServer()); // update server based on mirrors - wxMessageBox(_("Mirrors installed"), _("Information"), wxICON_INFORMATION); - } -} - -void UpdateDlg::UninstallFile() -{ - UpdateStatus(_("Please wait...")); - UpdateRec* rec = GetRecFromListView(); - if (!rec) - { - wxMessageBox(_("No file selected!"), _("Error"), wxICON_ERROR); - UpdateStatus(_("Ready"), 0, 0); - return; - } - wxYield(); - - DevPakInstaller inst; - if (inst.Uninstall(GetPackagePath() + rec->entry)) - { - wxMessageBox(_("DevPak uninstalled"), _("Message"), wxICON_INFORMATION); - - // refresh installed_version - rec->installed_version.Clear(); - rec->installed = false; - SetListColumnText(-1, 2, rec->installed_version); - } - else - { - wxMessageBox(_("DevPak was not uninstalled.\nStatus:\n") + inst.GetStatus(), _("Error"), wxICON_ERROR); - } -} - -void UpdateDlg::CreateEntryFile(UpdateRec* rec, const wxString& filename, const wxArrayString& files) -{ - wxString entry; - entry << _T("[Setup]\n"); - entry << _T("AppName=") << rec->name << _T("\n"); - entry << _T("AppVersion=") << rec->version << _T("\n"); - - 
entry << _T("\n"); - entry << _T("[Files]\n"); - for (unsigned int i = 0; i < files.GetCount(); ++i) - { - entry << files[i] << _T("\n"); - } - - wxFile f(filename, wxFile::write); - if (f.IsOpened()) - { - f.Write(entry.mb_str(wxConvUTF8),entry.Length()); - } -} - -void UpdateDlg::OnFileRightClick(wxListEvent& event) -{ -// LOGSTREAM << "pt.x=" << event.GetPoint().x << ", pt.y=" << event.GetPoint().y << '\n'; - UpdateRec* rec = GetRecFromListView(); - if (!rec) - return; - - wxMenu popup; - popup.Append(idPopupDownloadAndInstall, _("Download && install")); - popup.AppendSeparator(); - popup.Append(idPopupDownload, _("Download")); - popup.Append(idPopupInstall, _("Install")); - popup.AppendSeparator(); - popup.Append(idPopupUninstall, _("Uninstall")); - - bool canDl = !rec->downloaded || rec->version != rec->installed_version; - bool canInst = rec->downloaded && (!rec->installed || rec->version != rec->installed_version); - - popup.Enable(idPopupDownload, canDl); - popup.Enable(idPopupInstall, canInst); - popup.Enable(idPopupDownloadAndInstall, canInst || canDl); - popup.Enable(idPopupUninstall, rec->installed); - - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - lst->PopupMenu(&popup, event.GetPoint()); -} - -void UpdateDlg::OnFileDeSelected(wxListEvent& event) -{ - wxListItem id; - FillFileDetails(id); - EnableButtons(); -} - -void UpdateDlg::OnFileSelected(wxListEvent& event) -{ - FillFileDetails(event.GetItem()); - EnableButtons(); -} - -void UpdateDlg::OnTreeSelChanged(wxTreeEvent& event) -{ - FillFiles(event.GetItem()); - EnableButtons(); -} - -void UpdateDlg::OnDownload(wxCommandEvent& event) -{ - DownloadFile(true); -} - -void UpdateDlg::OnInstall(wxCommandEvent& event) -{ - InstallFile(); -} - -void UpdateDlg::OnUninstall(wxCommandEvent& event) -{ - UninstallFile(); -} - -void UpdateDlg::OnDownloadAndInstall(wxCommandEvent& event) -{ - DownloadFile(); -} - -void UpdateDlg::OnServerChange(wxCommandEvent& event) -{ - InternetUpdate(); -} - -void UpdateDlg::OnFilterChange(wxCommandEvent& event) -{ - ApplyFilter(); -} - -void UpdateDlg::OnConnect(wxCommandEvent& event) -{ - XRCCTRL(*this, "wxID_CANCEL", wxButton)->SetLabel(_("Abort")); - EnableButtons(); -} - -void UpdateDlg::OnDisConnect(wxCommandEvent& event) -{ - XRCCTRL(*this, "wxID_CANCEL", wxButton)->SetLabel(_("Close")); - EnableButtons(); -} - -void UpdateDlg::OnProgress(wxCommandEvent& event) -{ - int prg = -1; - if (m_CurrFileSize != 0) - prg = event.GetInt() * 100 / m_CurrFileSize; - UpdateStatus(_("Downloading: ") + event.GetString(), prg); - - wxStaticText* lbl = XRCCTRL(*this, "lblProgress", wxStaticText); - - wxString msg; - msg.Printf(_("%s of %s"), GetSizeString(event.GetInt()).c_str(), GetSizeString(m_CurrFileSize).c_str()); - lbl->SetLabel(msg); -} - -void UpdateDlg::OnAborted(wxCommandEvent& event) -{ - UpdateStatus(_("Download aborted: ") + event.GetString(), 0, 0); - XRCCTRL(*this, "lblProgress", wxStaticText)->SetLabel(_T("")); - m_LastBlockSize = 0; -} - -void UpdateDlg::OnDownloadStarted(wxCommandEvent& event) -{ - m_CurrFileSize = event.GetInt(); - UpdateStatus(_("Download started: ") + event.GetString(), 0, 100); - XRCCTRL(*this, "lblProgress", wxStaticText)->SetLabel(_T("")); - m_LastBlockSize = 0; -} - -void UpdateDlg::OnDownloadEnded(wxCommandEvent& event) -{ - UpdateStatus(_("Download finished: ") + event.GetString()); - XRCCTRL(*this, "lblProgress", wxStaticText)->SetLabel(_T("")); - m_LastBlockSize = 0; - - if (m_HasUpdated && event.GetInt() == 0) - { - UpdateRec* rec = GetRecFromListView(); 
- if (rec) - { - if (rec->bytes != m_CurrFileSize) - wxMessageBox(_("File size mismatch for ") + event.GetString() + _("!\n\n" - "This, usually, means one of three things:\n" - "1) The reported size in the update list is wrong. The DevPak might still be valid.\n" - "2) The file's location returned a web error-page. Invalid DevPak...\n" - "3) The file is corrupt...\n\n" - "You can try to install it anyway. If it is not a valid DevPak, the operation will fail."), - _("Warning"), wxICON_WARNING); - } - if (rec && rec->installable && wxMessageBox(_("Do you want to install ") + event.GetString() + _(" now?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxYES) - InstallFile(); - else if (rec && rec->title == _T("WebUpdate Mirrors list")) - InstallMirrors(GetPackagePath() + rec->local_file); - } - m_CurrFileSize = 0; -} - -void UpdateDlg::OnUpdateUI(wxUpdateUIEvent& event) -{ - // hack to display the download message *after* the dialog has been shown... - if (m_FirstTimeCheck) - { - m_FirstTimeCheck = false; // no more, just once - wxString config = GetConfFilename(); - if (wxFileExists(config)) - InternetUpdate(); - else - { - if (wxMessageBox(_("A list of updates needs to be downloaded.\nDo you want to do this now?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxYES) - InternetUpdate(true); - } - } -} diff --git a/tests/01uni_multi/[result]/updatedlg.h b/tests/01uni_multi/[result]/updatedlg.h deleted file mode 100644 index 596b9c6..0000000 --- a/tests/01uni_multi/[result]/updatedlg.h +++ /dev/null @@ -1,74 +0,0 @@ -#ifndef UPDATEDLG_H -#define UPDATEDLG_H - -#include -#include -#include -#include "cbnetwork.h" -#include "conf.h" - -class UpdateDlg : public wxDialog -{ - public: - UpdateDlg(wxWindow* parent); - virtual ~UpdateDlg(); - - void EndModal(int retCode); - protected: - void OnFileSelected(wxListEvent& event); - void OnFileDeSelected(wxListEvent& event); - void OnFileRightClick(wxListEvent& event); - void OnTreeSelChanged(wxTreeEvent& event); - void OnDownload(wxCommandEvent& event); - void OnInstall(wxCommandEvent& event); - void OnUninstall(wxCommandEvent& event); - void OnDownloadAndInstall(wxCommandEvent& event); - void OnUpdate(wxCommandEvent& event); - void OnServerChange(wxCommandEvent& event); - void OnFilterChange(wxCommandEvent& event); - void OnConnect(wxCommandEvent& event); - void OnDisConnect(wxCommandEvent& event); - void OnProgress(wxCommandEvent& event); - void OnAborted(wxCommandEvent& event); - void OnDownloadStarted(wxCommandEvent& event); - void OnDownloadEnded(wxCommandEvent& event); - void OnUpdateUI(wxUpdateUIEvent& event); - private: - void InternetUpdate(bool forceDownload = false); - void DownloadFile(bool dontInstall = false); - void InstallFile(); - void UninstallFile(); - void InstallMirrors(const wxString& file); - void CreateEntryFile(UpdateRec* rec, const wxString& filename, const wxArrayString& files); - void EnableButtons(bool update = true, bool abort = true); - void FillServers(); - void FillGroups(); - void FillFiles(const wxTreeItemId& id); - void FillFileDetails(const wxListItem& id); - void UpdateStatus(const wxString& status, int curProgress = -1, int maxProgress = -1); - UpdateRec* GetRecFromListView(); - void CreateListColumns(); - void AddRecordToList(UpdateRec* rec); - wxString GetListColumnText(int idx, int col); - void SetListColumnText(int idx, int col, const wxString& text); - - wxString GetConfFilename(); - wxString GetMirrorsFilename() const; - wxString GetCurrentServer() const; - wxString GetBasePath() const; - wxString 
GetPackagePath() const; - bool FilterRec(UpdateRec* rec); - void ApplyFilter(); - - UpdateRec* m_Recs; - wxArrayString m_Servers; - int m_RecsCount; - int m_CurrFileSize; - int m_LastBlockSize; // for bps - bool m_HasUpdated; - bool m_FirstTimeCheck; - cbNetwork m_Net; - DECLARE_EVENT_TABLE(); -}; - -#endif // UPDATEDLG_H diff --git a/tests/01uni_multi/conf.cpp b/tests/01uni_multi/conf.cpp deleted file mode 100644 index 9962641..0000000 --- a/tests/01uni_multi/conf.cpp +++ /dev/null @@ -1,110 +0,0 @@ -/* - * This file is part of the Code::Blocks IDE and licensed under the GNU General Public License, version 3 - * http://www.gnu.org/licenses/gpl-3.0.html - * - * $Revision: 4909 $ - * $Id: conf.cpp 4909 2008-02-27 13:15:26Z mortenmacfly $ - * $HeadURL: http://svn.berlios.de/svnroot/repos/codeblocks/tags/8.02/src/plugins/contrib/devpak_plugin/conf.cpp $ - */ - -#include "conf.h" -#include -#include -#include -#include - -wxString g_MasterPath; - -wxString GetSizeString(int bytes) -{ - wxString ret; - float kilobytes = (float)bytes / 1024.0f; - float megabytes = kilobytes / 1024.0f; - if (megabytes >= 1.0f) - ret.Printf(_("%.2f MB"), megabytes); - else if (kilobytes >= 1.0f) - ret.Printf(_("%.2f KB"), kilobytes); - else - ret.Printf(_("%ld bytes"), bytes); - return ret; -} - -UpdateRec* ReadConf(const IniParser& ini, int* recCount, const wxString& currentServer, const wxString& appPath) -{ - *recCount = 0; - int groupsCount = ini.GetGroupsCount(); - if (groupsCount == 0) - return 0; - - UpdateRec* list = new UpdateRec[ini.GetGroupsCount()]; - for (int i = 0; i < groupsCount; ++i) - { - UpdateRec& rec = list[i]; - - rec.title = ini.GetGroupName(i); - - // fix title - // devpaks.org has changed the title to contain some extra info - // e.g.: [libunicows Library version: 1.1.1 Devpak revision: 1sid] - // we don't need this extra info, so if we find it we remove it - int pos = rec.title.Find(_T("Library version:")); - if (pos != -1) - { - rec.title.Truncate(pos); - rec.title = rec.title.Trim(false); - rec.title = rec.title.Trim(true); - } - - rec.name = ini.GetKeyValue(i, _T("Name")); - rec.desc = ini.GetKeyValue(i, _T("Description")); - rec.remote_file = ini.GetKeyValue(i, _T("RemoteFilename")); - rec.local_file = ini.GetKeyValue(i, _T("LocalFilename")); - rec.groups = GetArrayFromString(ini.GetKeyValue(i, _T("Group")), _T(",")); - rec.install = ini.GetKeyValue(i, _T("InstallPath")); - rec.version = ini.GetKeyValue(i, _T("Version")); - ini.GetKeyValue(i, _T("Size")).ToLong(&rec.bytes); - rec.date = ini.GetKeyValue(i, _T("Date")); - rec.installable = ini.GetKeyValue(i, _T("Execute")) == _T("1"); - - // read .entry file (if exists) - rec.entry = (!rec.name.IsEmpty() ? 
rec.name : wxFileName(rec.local_file).GetName()) + _T(".entry"); - IniParser p; - p.ParseFile(appPath + rec.entry); - rec.installed_version = p.GetValue(_T("Setup"), _T("AppVersion")); - - rec.downloaded = wxFileExists(appPath + _T("/") + rec.local_file); - rec.installed = !rec.installed_version.IsEmpty(); - - // calculate size - rec.size = GetSizeString(rec.bytes); - - // fix-up - if (rec.name.IsEmpty()) - rec.name = rec.title; - rec.desc.Replace(_T(""), _T("\n")); - rec.desc.Replace(_T(""), _T("\r")); - wxURL url(rec.remote_file); - if (!url.GetServer().IsEmpty()) - { - rec.remote_server = url.GetScheme() + _T("://") + url.GetServer(); - int pos = rec.remote_file.Find(url.GetServer()); - if (pos != wxNOT_FOUND) - rec.remote_file.Remove(0, pos + url.GetServer().Length() + 1); - } - else - rec.remote_server = currentServer; - } - - *recCount = groupsCount; - return list; -} - -UpdateRec* FindRecByTitle(const wxString& title, UpdateRec* list, int count) -{ - for (int i = 0; i < count; ++i) - { - if (list[i].title == title) - return &list[i]; - } - return 0; -} diff --git a/tests/01uni_multi/conf.h b/tests/01uni_multi/conf.h deleted file mode 100644 index 03a0179..0000000 --- a/tests/01uni_multi/conf.h +++ /dev/null @@ -1,38 +0,0 @@ -#ifndef CONF_H -#define CONF_H - -#include -#include -#include "cbiniparser.h" - -struct UpdateRec -{ - wxString entry; - wxString title; - wxString name; - wxString desc; - wxString remote_server; - wxString remote_file; - wxString local_file; - wxArrayString groups; - wxString install; - wxString version; - wxString installed_version; - long int bytes; - float kilobytes; - float megabytes; - wxString size; - wxString date; - bool installable; - bool downloaded; - bool installed; -}; - -extern wxString g_MasterPath; - -UpdateRec* ReadConf(const IniParser& ini, int* recCount, const wxString& currentServer, const wxString& appPath); -UpdateRec* FindRecByTitle(const wxString& title, UpdateRec* list, int count); -// utility -wxString GetSizeString(int bytes); - -#endif // CONF_H diff --git a/tests/01uni_multi/manifest.xml b/tests/01uni_multi/manifest.xml deleted file mode 100644 index c7bc705..0000000 --- a/tests/01uni_multi/manifest.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - diff --git a/tests/01uni_multi/updatedlg.cpp b/tests/01uni_multi/updatedlg.cpp deleted file mode 100644 index 829b38a..0000000 --- a/tests/01uni_multi/updatedlg.cpp +++ /dev/null @@ -1,726 +0,0 @@ -/* - * This file is part of the Code::Blocks IDE and licensed under the GNU General Public License, version 3 - * http://www.gnu.org/licenses/gpl-3.0.html - * - * $Revision: 4909 $ - * $Id: updatedlg.cpp 4909 2008-02-27 13:15:26Z mortenmacfly $ - * $HeadURL: http://svn.berlios.de/svnroot/repos/codeblocks/tags/8.02/src/plugins/contrib/devpak_plugin/updatedlg.cpp $ - */ - -#include "updatedlg.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "devpakinstaller.h" -#include "crc32.h" - -#include "manager.h" -#include "configmanager.h" -#include "globals.h" - -int idNet = wxNewId(); -int idPopupInstall = wxNewId(); -int idPopupDownload = wxNewId(); -int idPopupDownloadAndInstall = wxNewId(); -int idPopupUninstall = wxNewId(); - -BEGIN_EVENT_TABLE(UpdateDlg, wxDialog) - EVT_UPDATE_UI(-1, UpdateDlg::OnUpdateUI) - EVT_TREE_SEL_CHANGED(XRCID("tvCategories"), UpdateDlg::OnTreeSelChanged) - EVT_LIST_ITEM_SELECTED(XRCID("lvFiles"), UpdateDlg::OnFileSelected) - EVT_LIST_ITEM_DESELECTED(XRCID("lvFiles"), UpdateDlg::OnFileDeSelected) 
- EVT_LIST_ITEM_RIGHT_CLICK(XRCID("lvFiles"), UpdateDlg::OnFileRightClick) - EVT_MENU(idPopupDownload, UpdateDlg::OnDownload) - EVT_MENU(idPopupDownloadAndInstall, UpdateDlg::OnDownloadAndInstall) - EVT_MENU(idPopupInstall, UpdateDlg::OnInstall) - EVT_MENU(idPopupUninstall, UpdateDlg::OnUninstall) - EVT_COMBOBOX(XRCID("cmbServer"), UpdateDlg::OnServerChange) - EVT_COMBOBOX(XRCID("cmbFilter"), UpdateDlg::OnFilterChange) - EVT_CHECKBOX(XRCID("chkCache"), UpdateDlg::OnServerChange) - EVT_CBNET_CONNECT(idNet, UpdateDlg::OnConnect) - EVT_CBNET_DISCONNECT(idNet, UpdateDlg::OnDisConnect) - EVT_CBNET_PROGRESS(idNet, UpdateDlg::OnProgress) - EVT_CBNET_ABORTED(idNet, UpdateDlg::OnAborted) - EVT_CBNET_START_DOWNLOAD(idNet, UpdateDlg::OnDownloadStarted) - EVT_CBNET_END_DOWNLOAD(idNet, UpdateDlg::OnDownloadEnded) -END_EVENT_TABLE() - -UpdateDlg::UpdateDlg(wxWindow* parent) - : m_Recs(0), - m_RecsCount(0), - m_CurrFileSize(0), - m_LastBlockSize(0), - m_HasUpdated(false), - m_FirstTimeCheck(true), - m_Net(this, idNet, _T("http://devpaks.sourceforge.net/")) -{ - //ctor - wxXmlResource::Get()->LoadDialog(this, parent, _T("MainFrame")); - CreateListColumns(); - FillServers(); - UpdateStatus(_("Ready"), 0); -} - -UpdateDlg::~UpdateDlg() -{ - //dtor - delete[] m_Recs; - m_RecsCount = 0; -} - -void UpdateDlg::EndModal(int retCode) -{ - if (!m_Net.IsConnected() || retCode != wxID_CANCEL) - { - wxDialog::EndModal(retCode); - return; - } - - if (m_Net.IsConnected()) - m_Net.Abort(); -} - -void UpdateDlg::CreateListColumns() -{ - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - lst->InsertColumn(0, _("Title")); - lst->InsertColumn(1, _("Version")); - lst->InsertColumn(2, _("Installed")); - lst->InsertColumn(3, _("Size"), wxLIST_FORMAT_RIGHT); - - lst->SetColumnWidth(0, lst->GetSize().x - (64 * 3) - 2); // 1st column takes all remaining space - lst->SetColumnWidth(1, 64); - lst->SetColumnWidth(2, 64); - lst->SetColumnWidth(3, 64); -} - -void UpdateDlg::AddRecordToList(UpdateRec* rec) -{ - if (!rec) - return; - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - int idx = lst->GetItemCount(); - lst->InsertItem(idx, rec->title); - lst->SetItem(idx, 1, rec->version); - lst->SetItem(idx, 2, rec->installed_version); - lst->SetItem(idx, 3, rec->size); -} - -void UpdateDlg::SetListColumnText(int idx, int col, const wxString& text) -{ - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - int index = idx == -1 ? 
lst->GetNextItem(-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED) : idx; - wxListItem it; - it.m_itemId = index; - it.m_col = col; - it.m_mask = wxLIST_MASK_TEXT; - it.m_text = text; - lst->SetItem(it); -} - -void UpdateDlg::UpdateStatus(const wxString& status, int curProgress, int maxProgress) -{ - wxStaticText* lbl = XRCCTRL(*this, "lblStatus", wxStaticText); - if (lbl->GetLabel() != status) - lbl->SetLabel(status); - if (curProgress != -1) - XRCCTRL(*this, "gauProgress", wxGauge)->SetValue(curProgress); - if (maxProgress != -1) - XRCCTRL(*this, "gauProgress", wxGauge)->SetRange(maxProgress); -} - -void UpdateDlg::EnableButtons(bool update, bool abort) -{ - wxButton* btnCl = XRCCTRL(*this, "wxID_CANCEL", wxButton); - - btnCl->Enable(abort); - // disable server list and cache checkbox while downloading - XRCCTRL(*this, "cmbServer", wxComboBox)->Enable(!m_Net.IsConnected()); - XRCCTRL(*this, "chkCache", wxCheckBox)->Enable(!m_Net.IsConnected()); - - wxYield(); -} - -void UpdateDlg::FillGroups() -{ - UpdateStatus(_("Parsing list of updates"), 0, m_RecsCount - 1); - - // get a list of unique group names - wxArrayString groups; - for (int i = 0; i < m_RecsCount; ++i) - { - for (unsigned int x = 0; x < m_Recs[i].groups.GetCount(); ++x) - { - if (m_Recs[i].groups[x].IsEmpty()) - continue; - if (groups.Index(m_Recs[i].groups[x]) == wxNOT_FOUND) - { - if (FilterRec(&m_Recs[i])) - groups.Add(m_Recs[i].groups[x]); - } - } - } - - // create the groups tree - wxTreeCtrl* tree = XRCCTRL(*this, "tvCategories", wxTreeCtrl); - tree->Freeze(); - tree->DeleteAllItems(); - wxTreeItemId root = tree->AddRoot(_("All categories")); - for (unsigned int i = 0; i < groups.GetCount(); ++i) - { - tree->AppendItem(root, groups[i]); - } - tree->SortChildren(root); - tree->Thaw(); - tree->Expand(root); - tree->SelectItem(root); // this calls the event - - UpdateStatus(_("Done parsing list of updates"), 0); -} - -void UpdateDlg::FillFiles(const wxTreeItemId& id) -{ - wxTreeCtrl* tree = XRCCTRL(*this, "tvCategories", wxTreeCtrl); - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - lst->Freeze(); - lst->ClearAll(); - CreateListColumns(); - - wxString group = id == tree->GetRootItem() ? 
_T("") : tree->GetItemText(id); - - // add files belonging to group - int counter = 0; - for (int i = 0; i < m_RecsCount; ++i) - { - if (group.IsEmpty() || (!m_Recs[i].groups.IsEmpty() && m_Recs[i].groups.Index(group) != wxNOT_FOUND)) - { - // filter - if (FilterRec(&m_Recs[i])) - { - AddRecordToList(&m_Recs[i]); - ++counter; - } - } - } - lst->Thaw(); - - // select first item - lst->SetItemState(0, wxLIST_STATE_SELECTED | wxLIST_STATE_FOCUSED, wxLIST_STATE_SELECTED | wxLIST_STATE_FOCUSED); -} - -void UpdateDlg::FillFileDetails(const wxListItem& id) -{ - wxTextCtrl* txt = XRCCTRL(*this, "txtInfo", wxTextCtrl); - txt->Clear(); - - UpdateRec* cur = GetRecFromListView(); - if (!cur) - { - txt->Clear(); - EnableButtons(); - return; - } - txt->AppendText(_("Name: ") + cur->name + _T("\n")); -// txt->AppendText(_("Server: ") + cur->remote_server + _T("\n")); -// txt->AppendText(_("File: ") + cur->remote_file + _T("\n")); - txt->AppendText(_("Version: ") + cur->version + _T("\n")); - txt->AppendText(_("Size: ") + cur->size + _T("\n")); - txt->AppendText(_("Date: ") + cur->date + _T("\n\n")); - txt->AppendText(_("Description: \n")); - txt->AppendText(cur->desc); - - txt->SetSelection(0, 0); - txt->SetInsertionPoint(0); -} - -void UpdateDlg::InternetUpdate(bool forceDownload) -{ - UpdateStatus(_("Please wait...")); - m_HasUpdated = false; - m_Net.SetServer(GetCurrentServer()); - - EnableButtons(false); - forceDownload = forceDownload || !XRCCTRL(*this, "chkCache", wxCheckBox)->GetValue(); - - bool forceDownloadMirrors = forceDownload || !wxFileExists(GetMirrorsFilename()); - if (forceDownloadMirrors) - { - if (!m_Net.DownloadFile(_T("mirrors.cfg"), GetMirrorsFilename())) - { - UpdateStatus(_("Error downloading list of mirrors"), 0, 0); - return; - } - else - { - FillServers(); - m_Net.SetServer(GetCurrentServer()); // update server based on mirrors - } - } - - wxString config = GetConfFilename(); - forceDownload = forceDownload || !wxFileExists(config); - if (forceDownload && !m_Net.DownloadFile(_T("webupdate.conf"), config)) - { - UpdateStatus(_("Error downloading list of updates"), 0, 0); - return; - } - else - { - IniParser ini; - if (!ini.ParseFile(config)) - { - UpdateStatus(_("Failed to retrieve the list of updates"), 0, 0); - return; - } - ini.Sort(); - - if (m_Recs) - delete[] m_Recs; - - // remember to delete[] m_Recs when we 're done with it!!! 
- // it's our responsibility once given to us - m_Recs = ReadConf(ini, &m_RecsCount, GetCurrentServer(), GetPackagePath()); - - FillGroups(); - } - EnableButtons(); - UpdateStatus(_("Ready"), 0, 0); - - m_HasUpdated = true; -} - -void UpdateDlg::FillServers() -{ - wxComboBox* cmb = XRCCTRL(*this, "cmbServer", wxComboBox); - cmb->Clear(); - m_Servers.Clear(); - - IniParser ini; - ini.ParseFile(GetMirrorsFilename()); - int group = ini.FindGroupByName(_T("WebUpdate mirrors")); - for (int i = 0; group != -1 && i < ini.GetKeysCount(group); ++i) - { - cmb->Append(ini.GetKeyName(group, i)); - m_Servers.Add(ini.GetKeyValue(group, i)); - } - if (cmb->GetCount() == 0) - { - cmb->Append(_("devpaks.org Community Devpaks")); - m_Servers.Add(_T("http://devpaks.sourceforge.net/")); - } - cmb->SetSelection(0); -} - -wxString UpdateDlg::GetConfFilename() -{ - int server_hash = GetTextCRC32(GetCurrentServer().mb_str()); - wxString config; - config = ConfigManager::GetConfigFolder() + wxFILE_SEP_PATH; - config.Printf(_T("%sdevpak_%x.conf"), config.c_str(), server_hash); - return config; -} - -wxString UpdateDlg::GetMirrorsFilename() const -{ - wxString config; - config = ConfigManager::GetConfigFolder() + wxFILE_SEP_PATH + _T("devpak_mirrors.cfg"); - return config; -} - -wxString UpdateDlg::GetCurrentServer() const -{ - return m_Servers[XRCCTRL(*this, "cmbServer", wxComboBox)->GetSelection()]; -} - -wxString UpdateDlg::GetBasePath() const -{ - return g_MasterPath + wxFILE_SEP_PATH; -} - -wxString UpdateDlg::GetPackagePath() const -{ - return GetBasePath() + _T("Packages") + wxFILE_SEP_PATH; -} - -bool UpdateDlg::FilterRec(UpdateRec* rec) -{ - if (!rec) - return false; - wxComboBox* cmb = XRCCTRL(*this, "cmbFilter", wxComboBox); - switch (cmb->GetSelection()) - { - case 0: // All - return true; - - case 1: // Installed - return rec->installed; - - case 2: // installed with update available - return rec->installed && rec->version != rec->installed_version; - - case 3: // downloaded but not installed - return rec->downloaded && !rec->installed; - - case 4: // not installed - return !rec->downloaded && !rec->installed; - - default: - return false; - } - return false; // doesn't reach here -} - -void UpdateDlg::ApplyFilter() -{ - wxTreeCtrl* tree = XRCCTRL(*this, "tvCategories", wxTreeCtrl); - - FillGroups(); - FillFiles(tree->GetSelection()); - EnableButtons(); -} - -UpdateRec* UpdateDlg::GetRecFromListView() -{ - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - int index = lst->GetNextItem(-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED); - if (index == -1) - return 0; - wxString title = lst->GetItemText(index); - return FindRecByTitle(title, m_Recs, m_RecsCount); -} - -void UpdateDlg::DownloadFile(bool dontInstall) -{ - UpdateStatus(_("Please wait...")); - UpdateRec* rec = GetRecFromListView(); - if (!rec) - { - wxMessageBox(_("No file selected!"), _("Error"), wxICON_ERROR); - UpdateStatus(_("Ready"), 0, 0); - return; - } - - if (rec->version == rec->installed_version) - { - if (wxMessageBox(_("You seem to have installed the latest version.\nAre you sure you want to proceed?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxNO) - return; - } - - if (!CreateDirRecursively(GetPackagePath())) - { - wxMessageBox(_("Can't create directory ") + GetPackagePath(), _("Error"), wxICON_ERROR); - return; - } - - if (wxFileExists(GetPackagePath() + rec->local_file)) - { - if (wxMessageBox(_("This file already exists!\nAre you sure you want to download it again?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == 
wxNO && - rec->installable) - { - if (!dontInstall && wxMessageBox(_("Do you want to force-install it?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxYES) - InstallFile(); - return; - } - } - - m_Net.SetServer(rec->remote_server); - - EnableButtons(false); - if (!m_Net.DownloadFile(rec->remote_file, GetPackagePath() + rec->local_file)) - { - rec->downloaded = false; - UpdateStatus(_("Error downloading file: ") + rec->remote_server + _T(" > ") + rec->remote_file, 0, 0); - return; - } - else - rec->downloaded = true; - UpdateStatus(_("Ready"), 0, 0); - EnableButtons(); -} - -void UpdateDlg::InstallFile() -{ - UpdateStatus(_("Please wait...")); - UpdateRec* rec = GetRecFromListView(); - if (!rec) - { - wxMessageBox(_("No file selected!"), _("Error"), wxICON_ERROR); - UpdateStatus(_("Ready"), 0, 0); - return; - } - wxYield(); - - if (rec->title == _T("WebUpdate Mirrors list")) - { - InstallMirrors(GetPackagePath() + rec->local_file); - rec->installed = true; - ApplyFilter(); - UpdateStatus(_("Ready"), 0, 0); - return; - } - else if (!rec->installable) - { - UpdateStatus(_("Ready"), 0, 0); - return; - } - - if (!CreateDirRecursively(GetPackagePath())) - { - UpdateStatus(_("Ready"), 0, 0); - wxMessageBox(_("Can't create directory ") + GetPackagePath(), _("Error"), wxICON_ERROR); - return; - } - - wxArrayString files; - DevPakInstaller inst; - if (inst.Install(rec->name, GetPackagePath() + rec->local_file, GetBasePath(), &files)) - { -// wxFileName fname(GetPackagePath() + rec->local_file); -// fname.SetExt("entry"); -// fname.SetName(rec->title); -// CreateEntryFile(rec, fname.GetFullPath(), files); - CreateEntryFile(rec, GetPackagePath() + rec->entry, files); - wxMessageBox(_("DevPak installed"), _("Message"), wxICON_INFORMATION); - - // refresh installed_version - rec->installed = true; - rec->installed_version = rec->version; - SetListColumnText(-1, 2, rec->installed_version); - } - else - { - wxMessageBox(_("DevPak was not installed.\nStatus:\n") + inst.GetStatus(), _("Error"), wxICON_ERROR); - } - UpdateStatus(_("Ready"), 0, 0); -} - -void UpdateDlg::InstallMirrors(const wxString& file) -{ - if (!wxCopyFile(file, GetMirrorsFilename(), true)) - wxMessageBox(_("Can't install mirrors file: ") + file, _("Error"), wxICON_ERROR); - else - { - wxRemoveFile(file); - FillServers(); - m_Net.SetServer(GetCurrentServer()); // update server based on mirrors - wxMessageBox(_("Mirrors installed"), _("Information"), wxICON_INFORMATION); - } -} - -void UpdateDlg::UninstallFile() -{ - UpdateStatus(_("Please wait...")); - UpdateRec* rec = GetRecFromListView(); - if (!rec) - { - wxMessageBox(_("No file selected!"), _("Error"), wxICON_ERROR); - UpdateStatus(_("Ready"), 0, 0); - return; - } - wxYield(); - - DevPakInstaller inst; - if (inst.Uninstall(GetPackagePath() + rec->entry)) - { - wxMessageBox(_("DevPak uninstalled"), _("Message"), wxICON_INFORMATION); - - // refresh installed_version - rec->installed_version.Clear(); - rec->installed = false; - SetListColumnText(-1, 2, rec->installed_version); - } - else - { - wxMessageBox(_("DevPak was not uninstalled.\nStatus:\n") + inst.GetStatus(), _("Error"), wxICON_ERROR); - } -} - -void UpdateDlg::CreateEntryFile(UpdateRec* rec, const wxString& filename, const wxArrayString& files) -{ - wxString entry; - entry << _T("[Setup]\n"); - entry << _T("AppName=") << rec->name << _T("\n"); - entry << _T("AppVersion=") << rec->version << _T("\n"); - - entry << _T("\n"); - entry << _T("[Files]\n"); - for (unsigned int i = 0; i < files.GetCount(); ++i) - { - entry << 
files[i] << _T("\n"); - } - - wxFile f(filename, wxFile::write); - if (f.IsOpened()) - { - f.Write(entry.mb_str(wxConvUTF8),entry.Length()); - } -} - -void UpdateDlg::OnFileRightClick(wxListEvent& event) -{ -// LOGSTREAM << "pt.x=" << event.GetPoint().x << ", pt.y=" << event.GetPoint().y << '\n'; - UpdateRec* rec = GetRecFromListView(); - if (!rec) - return; - - wxMenu popup; - popup.Append(idPopupDownloadAndInstall, _("Download && install")); - popup.AppendSeparator(); - popup.Append(idPopupDownload, _("Download")); - popup.Append(idPopupInstall, _("Install")); - popup.AppendSeparator(); - popup.Append(idPopupUninstall, _("Uninstall")); - - bool canDl = !rec->downloaded || rec->version != rec->installed_version; - bool canInst = rec->downloaded && (!rec->installed || rec->version != rec->installed_version); - - popup.Enable(idPopupDownload, canDl); - popup.Enable(idPopupInstall, canInst); - popup.Enable(idPopupDownloadAndInstall, canInst || canDl); - popup.Enable(idPopupUninstall, rec->installed); - - wxListCtrl* lst = XRCCTRL(*this, "lvFiles", wxListCtrl); - lst->PopupMenu(&popup, event.GetPoint()); -} - -void UpdateDlg::OnFileDeSelected(wxListEvent& event) -{ - wxListItem id; - FillFileDetails(id); - EnableButtons(); -} - -void UpdateDlg::OnFileSelected(wxListEvent& event) -{ - FillFileDetails(event.GetItem()); - EnableButtons(); -} - -void UpdateDlg::OnTreeSelChanged(wxTreeEvent& event) -{ - FillFiles(event.GetItem()); - EnableButtons(); -} - -void UpdateDlg::OnDownload(wxCommandEvent& event) -{ - DownloadFile(true); -} - -void UpdateDlg::OnInstall(wxCommandEvent& event) -{ - InstallFile(); -} - -void UpdateDlg::OnUninstall(wxCommandEvent& event) -{ - UninstallFile(); -} - -void UpdateDlg::OnDownloadAndInstall(wxCommandEvent& event) -{ - DownloadFile(); -} - -void UpdateDlg::OnServerChange(wxCommandEvent& event) -{ - InternetUpdate(); -} - -void UpdateDlg::OnFilterChange(wxCommandEvent& event) -{ - ApplyFilter(); -} - -void UpdateDlg::OnConnect(wxCommandEvent& event) -{ - XRCCTRL(*this, "wxID_CANCEL", wxButton)->SetLabel(_("Abort")); - EnableButtons(); -} - -void UpdateDlg::OnDisConnect(wxCommandEvent& event) -{ - XRCCTRL(*this, "wxID_CANCEL", wxButton)->SetLabel(_("Close")); - EnableButtons(); -} - -void UpdateDlg::OnProgress(wxCommandEvent& event) -{ - int prg = -1; - if (m_CurrFileSize != 0) - prg = event.GetInt() * 100 / m_CurrFileSize; - UpdateStatus(_("Downloading: ") + event.GetString(), prg); - - wxStaticText* lbl = XRCCTRL(*this, "lblProgress", wxStaticText); - - wxString msg; - msg.Printf(_("%s of %s"), GetSizeString(event.GetInt()).c_str(), GetSizeString(m_CurrFileSize).c_str()); - lbl->SetLabel(msg); -} - -void UpdateDlg::OnAborted(wxCommandEvent& event) -{ - UpdateStatus(_("Download aborted: ") + event.GetString(), 0, 0); - XRCCTRL(*this, "lblProgress", wxStaticText)->SetLabel(_T("")); - m_LastBlockSize = 0; -} - -void UpdateDlg::OnDownloadStarted(wxCommandEvent& event) -{ - m_CurrFileSize = event.GetInt(); - UpdateStatus(_("Download started: ") + event.GetString(), 0, 100); - XRCCTRL(*this, "lblProgress", wxStaticText)->SetLabel(_T("")); - m_LastBlockSize = 0; -} - -void UpdateDlg::OnDownloadEnded(wxCommandEvent& event) -{ - UpdateStatus(_("Download finished: ") + event.GetString()); - XRCCTRL(*this, "lblProgress", wxStaticText)->SetLabel(_T("")); - m_LastBlockSize = 0; - - if (m_HasUpdated && event.GetInt() == 0) - { - UpdateRec* rec = GetRecFromListView(); - if (rec) - { - if (rec->bytes != m_CurrFileSize) - wxMessageBox(_("File size mismatch for ") + event.GetString() 
+ _("!\n\n" - "This, usually, means one of three things:\n" - "1) The reported size in the update list is wrong. The DevPak might still be valid.\n" - "2) The file's location returned a web error-page. Invalid DevPak...\n" - "3) The file is corrupt...\n\n" - "You can try to install it anyway. If it is not a valid DevPak, the operation will fail."), - _("Warning"), wxICON_WARNING); - } - if (rec && rec->installable && wxMessageBox(_("Do you want to install ") + event.GetString() + _(" now?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxYES) - InstallFile(); - else if (rec && rec->title == _T("WebUpdate Mirrors list")) - InstallMirrors(GetPackagePath() + rec->local_file); - } - m_CurrFileSize = 0; -} - -void UpdateDlg::OnUpdateUI(wxUpdateUIEvent& event) -{ - // hack to display the download message *after* the dialog has been shown... - if (m_FirstTimeCheck) - { - m_FirstTimeCheck = false; // no more, just once - wxString config = GetConfFilename(); - if (wxFileExists(config)) - InternetUpdate(); - else - { - if (wxMessageBox(_("A list of updates needs to be downloaded.\nDo you want to do this now?"), _("Confirmation"), wxICON_QUESTION | wxYES_NO) == wxYES) - InternetUpdate(true); - } - } -} diff --git a/tests/01uni_multi/updatedlg.h b/tests/01uni_multi/updatedlg.h deleted file mode 100644 index 6e317ca..0000000 --- a/tests/01uni_multi/updatedlg.h +++ /dev/null @@ -1,73 +0,0 @@ -#ifndef UPDATEDLG_H -#define UPDATEDLG_H - -#include -#include -#include -#include "cbnetwork.h" -#include "conf.h" - -class UpdateDlg : public wxDialog -{ - public: - UpdateDlg(wxWindow* parent); - virtual ~UpdateDlg(); - - void EndModal(int retCode); - protected: - void OnFileSelected(wxListEvent& event); - void OnFileDeSelected(wxListEvent& event); - void OnFileRightClick(wxListEvent& event); - void OnTreeSelChanged(wxTreeEvent& event); - void OnDownload(wxCommandEvent& event); - void OnInstall(wxCommandEvent& event); - void OnUninstall(wxCommandEvent& event); - void OnDownloadAndInstall(wxCommandEvent& event); - void OnUpdate(wxCommandEvent& event); - void OnServerChange(wxCommandEvent& event); - void OnFilterChange(wxCommandEvent& event); - void OnConnect(wxCommandEvent& event); - void OnDisConnect(wxCommandEvent& event); - void OnProgress(wxCommandEvent& event); - void OnAborted(wxCommandEvent& event); - void OnDownloadStarted(wxCommandEvent& event); - void OnDownloadEnded(wxCommandEvent& event); - void OnUpdateUI(wxUpdateUIEvent& event); - private: - void InternetUpdate(bool forceDownload = false); - void DownloadFile(bool dontInstall = false); - void InstallFile(); - void UninstallFile(); - void InstallMirrors(const wxString& file); - void CreateEntryFile(UpdateRec* rec, const wxString& filename, const wxArrayString& files); - void EnableButtons(bool update = true, bool abort = true); - void FillServers(); - void FillGroups(); - void FillFiles(const wxTreeItemId& id); - void FillFileDetails(const wxListItem& id); - void UpdateStatus(const wxString& status, int curProgress = -1, int maxProgress = -1); - UpdateRec* GetRecFromListView(); - void CreateListColumns(); - void AddRecordToList(UpdateRec* rec); - void SetListColumnText(int idx, int col, const wxString& text); - - wxString GetConfFilename(); - wxString GetMirrorsFilename() const; - wxString GetCurrentServer() const; - wxString GetBasePath() const; - wxString GetPackagePath() const; - bool FilterRec(UpdateRec* rec); - void ApplyFilter(); - - UpdateRec* m_Recs; - wxArrayString m_Servers; - int m_RecsCount; - int m_CurrFileSize; - int 
m_LastBlockSize; // for bps - bool m_HasUpdated; - bool m_FirstTimeCheck; - cbNetwork m_Net; - DECLARE_EVENT_TABLE(); -}; - -#endif // UPDATEDLG_H diff --git a/tests/02uni_newline.from b/tests/02uni_newline.from deleted file mode 100644 index cf5a315..0000000 --- a/tests/02uni_newline.from +++ /dev/null @@ -1,4 +0,0 @@ - -read_patch("fix_devpak_install.patch") - -asd \ No newline at end of file diff --git a/tests/02uni_newline.patch b/tests/02uni_newline.patch deleted file mode 100644 index 072649c..0000000 --- a/tests/02uni_newline.patch +++ /dev/null @@ -1,8 +0,0 @@ ---- 02uni_newline.from 2008-07-02 18:34:04 +0000 -+++ 02uni_newline.to 2008-07-02 18:34:08 +0000 -@@ -1,4 +1,3 @@ - - read_patch("fix_devpak_install.patch") - --asd -\ No newline at end of file diff --git a/tests/02uni_newline.to b/tests/02uni_newline.to deleted file mode 100644 index 6c62b1a..0000000 --- a/tests/02uni_newline.to +++ /dev/null @@ -1,3 +0,0 @@ - -read_patch("fix_devpak_install.patch") - diff --git a/tests/03trail_fname.from b/tests/03trail_fname.from deleted file mode 100644 index 01ff017..0000000 --- a/tests/03trail_fname.from +++ /dev/null @@ -1,8 +0,0 @@ -Tests: -- file not found -- trailing spaces in patch filenames -- already patched -- create new files -- remove files -- svn diff -- hg diff diff --git a/tests/03trail_fname.patch b/tests/03trail_fname.patch deleted file mode 100644 index ec29b30..0000000 --- a/tests/03trail_fname.patch +++ /dev/null @@ -1,12 +0,0 @@ ---- 03trail_fname.from -+++ 03trail_fname.to -@@ -1,7 +1,8 @@ - Tests: - - file not found --- trailing spaces in patch filenames - - already patched -+ -+Features: - - create new files - - remove files - - svn diff diff --git a/tests/03trail_fname.to b/tests/03trail_fname.to deleted file mode 100644 index 758b097..0000000 --- a/tests/03trail_fname.to +++ /dev/null @@ -1,9 +0,0 @@ -Tests: -- file not found -- already patched - -Features: -- create new files -- remove files -- svn diff -- hg diff diff --git a/tests/04can_patch.from b/tests/04can_patch.from deleted file mode 100644 index 0380b9d..0000000 --- a/tests/04can_patch.from +++ /dev/null @@ -1,40 +0,0 @@ -beta -beta -beta -beta -beta -beta -beta -alpha -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -alpha -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta \ No newline at end of file diff --git a/tests/04can_patch.patch b/tests/04can_patch.patch deleted file mode 100644 index 356beb0..0000000 --- a/tests/04can_patch.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- 04can_patch.from Sun Dec 27 09:53:51 2009 -+++ 04can_patch.to Sun Dec 27 09:54:06 2009 -@@ -6,8 +6,6 @@ - beta - beta - alpha --beta --beta - beta - beta - beta diff --git a/tests/04can_patch.to b/tests/04can_patch.to deleted file mode 100644 index 6e94e33..0000000 --- a/tests/04can_patch.to +++ /dev/null @@ -1,38 +0,0 @@ -beta -beta -beta -beta -beta -beta -beta -alpha -beta -beta -beta -beta -beta -beta -beta -beta -alpha -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta -beta \ No newline at end of file diff --git a/tests/05hg_change.from b/tests/05hg_change.from deleted file mode 100644 index 27d0a49..0000000 --- a/tests/05hg_change.from +++ /dev/null @@ -1,295 +0,0 @@ -""" -TestSuite - -Files/directories that comprise one test all have the same name, but a different extensions: -*.patch -*.from -*.to - -*.doctest - self contained doctest patch - -TODO: recheck input/output 
sources - -== Code Coverage == - -To refresh code coverage stats, get 'coverage' tool from -http://pypi.python.org/pypi/coverage/ and run this file with: - - coverage run run_tests.py - coverage html -d coverage - -On Windows it may be more convenient instead of `coverage` call -`python -m coverage.__main__` -""" - -import os -import sys -import re -import shutil -import unittest -import copy -from os import listdir -from os.path import abspath, dirname, exists, join, isdir -from tempfile import mkdtemp - -verbose = False -if "-v" in sys.argv or "--verbose" in sys.argv: - verbose = True - - -#: full path for directory with tests -tests_dir = dirname(abspath(__file__)) - - -# import patch.py from parent directory -save_path = sys.path -sys.path.insert(0, dirname(tests_dir)) -import patch -sys.path = save_path - - -# ---------------------------------------------------------------------------- -class TestPatchFiles(unittest.TestCase): - """ - unittest hack - test* methods are generated by add_test_methods() function - below dynamicallt using information about *.patch files from tests directory - - """ - def _assert_files_equal(self, file1, file2): - f1 = f2 = None - try: - f1 = open(file1, "rb") - f2 = open(file2, "rb") - for line in f1: - self.assertEqual(line, f2.readline()) - - finally: - if f2: - f2.close() - if f1: - f1.close() - - def _assert_dirs_equal(self, dir1, dir2, ignore=[]): - """ compare dir1 with reference dir2 - .svn dirs are ignored - - """ - # recursion here - e2list = listdir(dir2) - for e1 in listdir(dir1): - if e1 == ".svn": - continue - e1path = join(dir1, e1) - e2path = join(dir2, e1) - self.assert_(exists(e1path)) - self.assert_(exists(e2path), "%s does not exist" % e2path) - self.assert_(isdir(e1path) == isdir(e2path)) - if not isdir(e1path): - self._assert_files_equal(e1path, e2path) - else: - self._assert_dirs_equal(e1path, e2path) - e2list.remove(e1) - for e2 in e2list: - if e2 == ".svn" or e2 in ignore: - continue - self.fail("extra file or directory: %s" % e2) - - - def _run_test(self, testname): - """ - boilerplate for running *.patch file tests - """ - - # 1. create temp test directory - # 2. copy files - # 3. execute file-based patch - # 4. compare results - # 5. cleanup on success - - tmpdir = mkdtemp(prefix="%s."%testname) - - patch_file = join(tmpdir, "%s.patch" % testname) - shutil.copy(join(tests_dir, "%s.patch" % testname), patch_file) - - from_src = join(tests_dir, "%s.from" % testname) - from_tgt = join(tmpdir, "%s.from" % testname) - - if not isdir(from_src): - shutil.copy(from_src, from_tgt) - else: - for e in listdir(from_src): - if e == ".svn": - continue - epath = join(from_src, e) - if not isdir(epath): - shutil.copy(epath, join(tmpdir, e)) - else: - shutil.copytree(epath, join(tmpdir, e)) - - - # 3. - # test utility as a whole - patch_tool = join(dirname(tests_dir), "patch.py") - save_cwd = os.getcwdu() - os.chdir(tmpdir) - if verbose: - cmd = '%s %s "%s"' % (sys.executable, patch_tool, patch_file) - print "\n"+cmd - else: - cmd = '%s %s -q "%s"' % (sys.executable, patch_tool, patch_file) - ret = os.system(cmd) - assert ret == 0, "Error %d running test %s" % (ret, testname) - os.chdir(save_cwd) - - - # 4. 
- # compare results - if not isdir(from_src): - self._assert_files_equal(join(tests_dir, "%s.to" % testname), from_tgt) - else: - # need recursive compare - self._assert_dirs_equal(join(tests_dir, "%s.to" % testname), tmpdir, "%s.patch" % testname) - - - - shutil.rmtree(tmpdir) - return 0 - - -def add_test_methods(cls): - """ - hack to generate test* methods in target class - one - for each *.patch file in tests directory - """ - - # list testcases - every test starts with number - # and add them as test* methods - testptn = re.compile(r"^(?P\d{2,}.+)\.(?P[^\.]+)") - testset = sorted( set([testptn.match(e).group('name') for e in listdir(tests_dir) if testptn.match(e)]) ) - - for filename in testset: - methname = filename.replace(" ", "_") - def create_closure(): - name = filename - return lambda self: self._run_test(name) - setattr(cls, "test%s" % methname, create_closure()) - if verbose: - print "added test method %s to %s" % (methname, cls) -add_test_methods(TestPatchFiles) - -# ---------------------------------------------------------------------------- - -class TestCheckPatched(unittest.TestCase): - def setUp(self): - self.save_cwd = os.getcwdu() - os.chdir(tests_dir) - - def tearDown(self): - os.chdir(self.save_cwd) - - def test_patched_multiline(self): - pto = patch.fromfile("01uni_multi.patch") - os.chdir(join(tests_dir, "01uni_multi.to")) - self.assert_(pto.can_patch("updatedlg.cpp")) - - def test_can_patch_single_source(self): - pto2 = patch.fromfile("02uni_newline.patch") - self.assert_(pto2.can_patch("02uni_newline.from")) - - def test_can_patch_fails_on_target_file(self): - pto3 = patch.fromfile("03trail_fname.patch") - self.assertEqual(None, pto3.can_patch("03trail_fname.to")) - self.assertEqual(None, pto3.can_patch("not_in_source.also")) - - def test_multiline_false_on_other_file(self): - pto = patch.fromfile("01uni_multi.patch") - os.chdir(join(tests_dir, "01uni_multi.from")) - self.assertFalse(pto.can_patch("updatedlg.cpp")) - - def test_single_false_on_other_file(self): - pto3 = patch.fromfile("03trail_fname.patch") - self.assertFalse(pto3.can_patch("03trail_fname.from")) - - def test_can_patch_checks_source_filename_even_if_target_can_be_patched(self): - pto2 = patch.fromfile("04can_patch.patch") - self.assertFalse(pto2.can_patch("04can_patch.to")) - -# ---------------------------------------------------------------------------- - -class TestPatchParse(unittest.TestCase): - def test_fromstring(self): - try: - f = open(join(tests_dir, "01uni_multi.patch"), "rb") - readstr = f.read() - finally: - f.close() - pst = patch.fromstring(readstr) - self.assertEqual(len(pst), 5) - - def test_no_header_for_plain_diff_with_single_file(self): - pto = patch.fromfile(join(tests_dir, "03trail_fname.patch")) - self.assertEqual(pto.items[0].header, []) - - def test_header_for_second_file_in_svn_diff(self): - pto = patch.fromfile(join(tests_dir, "01uni_multi.patch")) - self.assertEqual(pto.items[1].header[0], 'Index: updatedlg.h\r\n') - self.assert_(pto.items[1].header[1].startswith('=====')) - - def test_fail_missing_hunk_line(self): - fp = open(join(tests_dir, "data/failing/missing-hunk-line.diff")) - pto = patch.PatchSet() - self.assertNotEqual(pto.parse(fp), True) - fp.close() - - def test_fail_absolute_path(self): - fp = open(join(tests_dir, "data/failing/absolute-path.diff")) - res = patch.PatchSet().parse(fp) - self.assertFalse(res) - fp.close() - - def test_fail_parent_path(self): - fp = open(join(tests_dir, "data/failing/parent-path.diff")) - res = patch.PatchSet().parse(fp) - 
self.assertFalse(res) - fp.close() - -class TestPatchSetDetect(unittest.TestCase): - def test_svn_detected(self): - pto = patch.fromfile(join(tests_dir, "01uni_multi.patch")) - self.assertEqual(pto.type, patch.SVN) - - def test_hg_detected(self): - pto = patch.fromfile(join(tests_dir, "data/hg-added-file.diff")) - self.assertEqual(pto.type, patch.HG) - -class TestPatchApply(unittest.TestCase): - def setUp(self): - self.save_cwd = os.getcwdu() - self.tmpdir = mkdtemp(prefix=self.__class__.__name__) - os.chdir(self.tmpdir) - - def tearDown(self): - os.chdir(self.save_cwd) - shutil.rmtree(self.tmpdir) - - def tmpcopy(self, filenames): - """copy file(s) from test_dir to self.tmpdir""" - for f in filenames: - shutil.copy(join(tests_dir, f), self.tmpdir) - - def test_apply_returns_false_of_failure(self): - self.tmpcopy(['data/failing/non-empty-patch-for-empty-file.diff', - 'data/failing/upload.py']) - pto = patch.fromfile('non-empty-patch-for-empty-file.diff') - self.assertFalse(pto.apply()) - - def test_apply_returns_true_on_success(self): - self.tmpcopy(['03trail_fname.patch', - '03trail_fname.from']) - pto = patch.fromfile('03trail_fname.patch') - self.assert_(pto.apply()) - -# ---------------------------------------------------------------------------- - -if __name__ == '__main__': - unittest.main() diff --git a/tests/05hg_change.patch b/tests/05hg_change.patch deleted file mode 100644 index ccb1986..0000000 --- a/tests/05hg_change.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff -r 603f07175741 05hg_change.from ---- a/05hg_change.from Wed Mar 16 08:21:44 2011 +0200 -+++ b/05hg_change.to Wed Mar 16 10:08:57 2011 +0200 -@@ -262,6 +262,10 @@ - pto = patch.fromfile(join(tests_dir, "data/hg-added-file.diff")) - self.assertEqual(pto.type, patch.HG) - -+ def test_git_changed_detected(self): -+ pto = patch.fromfile(join(tests_dir, "data/git-changed-file.diff")) -+ self.assertEqual(pto.type, patch.GIT) -+ - class TestPatchApply(unittest.TestCase): - def setUp(self): - self.save_cwd = os.getcwdu() diff --git a/tests/05hg_change.to b/tests/05hg_change.to deleted file mode 100644 index e3aa3d2..0000000 --- a/tests/05hg_change.to +++ /dev/null @@ -1,299 +0,0 @@ -""" -TestSuite - -Files/directories that comprise one test all have the same name, but a different extensions: -*.patch -*.from -*.to - -*.doctest - self contained doctest patch - -TODO: recheck input/output sources - -== Code Coverage == - -To refresh code coverage stats, get 'coverage' tool from -http://pypi.python.org/pypi/coverage/ and run this file with: - - coverage run run_tests.py - coverage html -d coverage - -On Windows it may be more convenient instead of `coverage` call -`python -m coverage.__main__` -""" - -import os -import sys -import re -import shutil -import unittest -import copy -from os import listdir -from os.path import abspath, dirname, exists, join, isdir -from tempfile import mkdtemp - -verbose = False -if "-v" in sys.argv or "--verbose" in sys.argv: - verbose = True - - -#: full path for directory with tests -tests_dir = dirname(abspath(__file__)) - - -# import patch.py from parent directory -save_path = sys.path -sys.path.insert(0, dirname(tests_dir)) -import patch -sys.path = save_path - - -# ---------------------------------------------------------------------------- -class TestPatchFiles(unittest.TestCase): - """ - unittest hack - test* methods are generated by add_test_methods() function - below dynamicallt using information about *.patch files from tests directory - - """ - def _assert_files_equal(self, file1, file2): - 
f1 = f2 = None - try: - f1 = open(file1, "rb") - f2 = open(file2, "rb") - for line in f1: - self.assertEqual(line, f2.readline()) - - finally: - if f2: - f2.close() - if f1: - f1.close() - - def _assert_dirs_equal(self, dir1, dir2, ignore=[]): - """ compare dir1 with reference dir2 - .svn dirs are ignored - - """ - # recursion here - e2list = listdir(dir2) - for e1 in listdir(dir1): - if e1 == ".svn": - continue - e1path = join(dir1, e1) - e2path = join(dir2, e1) - self.assert_(exists(e1path)) - self.assert_(exists(e2path), "%s does not exist" % e2path) - self.assert_(isdir(e1path) == isdir(e2path)) - if not isdir(e1path): - self._assert_files_equal(e1path, e2path) - else: - self._assert_dirs_equal(e1path, e2path) - e2list.remove(e1) - for e2 in e2list: - if e2 == ".svn" or e2 in ignore: - continue - self.fail("extra file or directory: %s" % e2) - - - def _run_test(self, testname): - """ - boilerplate for running *.patch file tests - """ - - # 1. create temp test directory - # 2. copy files - # 3. execute file-based patch - # 4. compare results - # 5. cleanup on success - - tmpdir = mkdtemp(prefix="%s."%testname) - - patch_file = join(tmpdir, "%s.patch" % testname) - shutil.copy(join(tests_dir, "%s.patch" % testname), patch_file) - - from_src = join(tests_dir, "%s.from" % testname) - from_tgt = join(tmpdir, "%s.from" % testname) - - if not isdir(from_src): - shutil.copy(from_src, from_tgt) - else: - for e in listdir(from_src): - if e == ".svn": - continue - epath = join(from_src, e) - if not isdir(epath): - shutil.copy(epath, join(tmpdir, e)) - else: - shutil.copytree(epath, join(tmpdir, e)) - - - # 3. - # test utility as a whole - patch_tool = join(dirname(tests_dir), "patch.py") - save_cwd = os.getcwdu() - os.chdir(tmpdir) - if verbose: - cmd = '%s %s "%s"' % (sys.executable, patch_tool, patch_file) - print "\n"+cmd - else: - cmd = '%s %s -q "%s"' % (sys.executable, patch_tool, patch_file) - ret = os.system(cmd) - assert ret == 0, "Error %d running test %s" % (ret, testname) - os.chdir(save_cwd) - - - # 4. 
- # compare results - if not isdir(from_src): - self._assert_files_equal(join(tests_dir, "%s.to" % testname), from_tgt) - else: - # need recursive compare - self._assert_dirs_equal(join(tests_dir, "%s.to" % testname), tmpdir, "%s.patch" % testname) - - - - shutil.rmtree(tmpdir) - return 0 - - -def add_test_methods(cls): - """ - hack to generate test* methods in target class - one - for each *.patch file in tests directory - """ - - # list testcases - every test starts with number - # and add them as test* methods - testptn = re.compile(r"^(?P\d{2,}.+)\.(?P[^\.]+)") - testset = sorted( set([testptn.match(e).group('name') for e in listdir(tests_dir) if testptn.match(e)]) ) - - for filename in testset: - methname = filename.replace(" ", "_") - def create_closure(): - name = filename - return lambda self: self._run_test(name) - setattr(cls, "test%s" % methname, create_closure()) - if verbose: - print "added test method %s to %s" % (methname, cls) -add_test_methods(TestPatchFiles) - -# ---------------------------------------------------------------------------- - -class TestCheckPatched(unittest.TestCase): - def setUp(self): - self.save_cwd = os.getcwdu() - os.chdir(tests_dir) - - def tearDown(self): - os.chdir(self.save_cwd) - - def test_patched_multiline(self): - pto = patch.fromfile("01uni_multi.patch") - os.chdir(join(tests_dir, "01uni_multi.to")) - self.assert_(pto.can_patch("updatedlg.cpp")) - - def test_can_patch_single_source(self): - pto2 = patch.fromfile("02uni_newline.patch") - self.assert_(pto2.can_patch("02uni_newline.from")) - - def test_can_patch_fails_on_target_file(self): - pto3 = patch.fromfile("03trail_fname.patch") - self.assertEqual(None, pto3.can_patch("03trail_fname.to")) - self.assertEqual(None, pto3.can_patch("not_in_source.also")) - - def test_multiline_false_on_other_file(self): - pto = patch.fromfile("01uni_multi.patch") - os.chdir(join(tests_dir, "01uni_multi.from")) - self.assertFalse(pto.can_patch("updatedlg.cpp")) - - def test_single_false_on_other_file(self): - pto3 = patch.fromfile("03trail_fname.patch") - self.assertFalse(pto3.can_patch("03trail_fname.from")) - - def test_can_patch_checks_source_filename_even_if_target_can_be_patched(self): - pto2 = patch.fromfile("04can_patch.patch") - self.assertFalse(pto2.can_patch("04can_patch.to")) - -# ---------------------------------------------------------------------------- - -class TestPatchParse(unittest.TestCase): - def test_fromstring(self): - try: - f = open(join(tests_dir, "01uni_multi.patch"), "rb") - readstr = f.read() - finally: - f.close() - pst = patch.fromstring(readstr) - self.assertEqual(len(pst), 5) - - def test_no_header_for_plain_diff_with_single_file(self): - pto = patch.fromfile(join(tests_dir, "03trail_fname.patch")) - self.assertEqual(pto.items[0].header, []) - - def test_header_for_second_file_in_svn_diff(self): - pto = patch.fromfile(join(tests_dir, "01uni_multi.patch")) - self.assertEqual(pto.items[1].header[0], 'Index: updatedlg.h\r\n') - self.assert_(pto.items[1].header[1].startswith('=====')) - - def test_fail_missing_hunk_line(self): - fp = open(join(tests_dir, "data/failing/missing-hunk-line.diff")) - pto = patch.PatchSet() - self.assertNotEqual(pto.parse(fp), True) - fp.close() - - def test_fail_absolute_path(self): - fp = open(join(tests_dir, "data/failing/absolute-path.diff")) - res = patch.PatchSet().parse(fp) - self.assertFalse(res) - fp.close() - - def test_fail_parent_path(self): - fp = open(join(tests_dir, "data/failing/parent-path.diff")) - res = patch.PatchSet().parse(fp) - 
self.assertFalse(res) - fp.close() - -class TestPatchSetDetect(unittest.TestCase): - def test_svn_detected(self): - pto = patch.fromfile(join(tests_dir, "01uni_multi.patch")) - self.assertEqual(pto.type, patch.SVN) - - def test_hg_detected(self): - pto = patch.fromfile(join(tests_dir, "data/hg-added-file.diff")) - self.assertEqual(pto.type, patch.HG) - - def test_git_changed_detected(self): - pto = patch.fromfile(join(tests_dir, "data/git-changed-file.diff")) - self.assertEqual(pto.type, patch.GIT) - -class TestPatchApply(unittest.TestCase): - def setUp(self): - self.save_cwd = os.getcwdu() - self.tmpdir = mkdtemp(prefix=self.__class__.__name__) - os.chdir(self.tmpdir) - - def tearDown(self): - os.chdir(self.save_cwd) - shutil.rmtree(self.tmpdir) - - def tmpcopy(self, filenames): - """copy file(s) from test_dir to self.tmpdir""" - for f in filenames: - shutil.copy(join(tests_dir, f), self.tmpdir) - - def test_apply_returns_false_of_failure(self): - self.tmpcopy(['data/failing/non-empty-patch-for-empty-file.diff', - 'data/failing/upload.py']) - pto = patch.fromfile('non-empty-patch-for-empty-file.diff') - self.assertFalse(pto.apply()) - - def test_apply_returns_true_on_success(self): - self.tmpcopy(['03trail_fname.patch', - '03trail_fname.from']) - pto = patch.fromfile('03trail_fname.patch') - self.assert_(pto.apply()) - -# ---------------------------------------------------------------------------- - -if __name__ == '__main__': - unittest.main() diff --git a/tests/06nested/.hgignore b/tests/06nested/.hgignore deleted file mode 100644 index 251183e..0000000 --- a/tests/06nested/.hgignore +++ /dev/null @@ -1,11 +0,0 @@ -syntax: glob -*.pyc -*.pyo -build -pyglet.*.log -*.orig -.DS_Store -doc/api -doc/_build -doc/internal/build.rst -website/dist diff --git a/tests/06nested/06nested.patch b/tests/06nested/06nested.patch deleted file mode 100644 index 1d0ec92..0000000 --- a/tests/06nested/06nested.patch +++ /dev/null @@ -1,112 +0,0 @@ -diff -r dcee1189d959 .hgignore ---- a/.hgignore Tue Oct 23 01:05:32 2012 +0300 -+++ b/.hgignore Mon Dec 03 11:32:26 2012 +0300 -@@ -5,6 +5,7 @@ - pyglet.*.log - *.orig - .DS_Store -+_htmlapi - doc/api - doc/_build - doc/internal/build.rst -diff -r dcee1189d959 examples/font_comparison.py ---- a/examples/font_comparison.py Tue Oct 23 01:05:32 2012 +0300 -+++ b/examples/font_comparison.py Mon Dec 03 10:06:28 2012 +0300 -@@ -45,13 +45,14 @@ - FONTS = ['Andale Mono', 'Consolas', 'Inconsolata', 'Inconsolata-dz', 'Monaco', - 'Menlo'] - --SAMPLE = '''class Spam(object): -- def __init__(self): -- # The quick brown fox -- self.spam = {"jumped": 'over'} -+SAMPLE = '''\ -+class Spam(object): -+ def __init__(self): -+ # The quick brown fox -+ self.spam = {"jumped": 'over'} - @the -- def lazy(self, *dog): -- self.dog = [lazy, lazy]''' -+ def lazy(self, *dog): -+ self.dog = [lazy, lazy]''' - - class Window(pyglet.window.Window): - font_num = 0 -diff -r dcee1189d959 experimental/console.py ---- a/experimental/console.py Tue Oct 23 01:05:32 2012 +0300 -+++ b/experimental/console.py Mon Dec 03 10:06:28 2012 +0300 -@@ -18,7 +18,7 @@ - - class Console(object): - def __init__(self, width, height, globals=None, locals=None): -- self.font = pyglet.text.default_font_factory.get_font('bitstream vera sans mono', 12) -+ self.font = pyglet.font.load('bitstream vera sans mono', 12) - self.lines = [] - self.buffer = '' - self.pre_buffer = '' -@@ -29,7 +29,7 @@ - self.write_pending = '' - - self.width, self.height = (width, height) -- self.max_lines = self.height / self.font.glyph_height - 1 
-+ self.max_lines = self.height / (self.font.ascent - self.font.descent) - 1 - - self.write('pyglet command console\n') - self.write('Version %s\n' % __version__) -diff -r dcee1189d959 pyglet/font/win32.py ---- a/pyglet/font/win32.py Tue Oct 23 01:05:32 2012 +0300 -+++ b/pyglet/font/win32.py Mon Dec 03 10:06:28 2012 +0300 -@@ -45,6 +45,7 @@ - from sys import byteorder - import pyglet - from pyglet.font import base -+from pyglet.font import win32query - import pyglet.image - from pyglet.libs.win32.constants import * - from pyglet.libs.win32.types import * -@@ -262,9 +263,7 @@ - - @classmethod - def have_font(cls, name): -- # CreateFontIndirect always returns a font... have to work out -- # something with EnumFontFamily... TODO -- return True -+ return win32query.have_font(name) - - @classmethod - def add_font_data(cls, data): -diff -r dcee1189d959 tests/app/EVENT_LOOP.py ---- a/tests/app/EVENT_LOOP.py Tue Oct 23 01:05:32 2012 +0300 -+++ b/tests/app/EVENT_LOOP.py Mon Dec 03 10:06:28 2012 +0300 -@@ -21,8 +21,8 @@ - - class EVENT_LOOP(unittest.TestCase): - def t_scheduled(self, interval, iterations, sleep_time=0): -- print 'Test interval=%s, iterations=%s, sleep=%s' % (interval, -- iterations, sleep_time) -+ print('Test interval=%s, iterations=%s, sleep=%s' % (interval, -+ iterations, sleep_time)) - warmup_iterations = iterations - - self.last_t = 0. -@@ -76,6 +76,6 @@ - - if __name__ == '__main__': - if pyglet.version != '1.2dev': -- print 'Wrong version of pyglet imported; please check your PYTHONPATH' -+ print('Wrong version of pyglet imported; please check your PYTHONPATH') - else: - unittest.main() -diff -r dcee1189d959 tests/font/SYSTEM.py ---- a/tests/font/SYSTEM.py Tue Oct 23 01:05:32 2012 +0300 -+++ b/tests/font/SYSTEM.py Mon Dec 03 10:06:28 2012 +0300 -@@ -20,7 +20,7 @@ - if sys.platform == 'darwin': - font_name = 'Helvetica' - elif sys.platform in ('win32', 'cygwin'): -- font_name = 'Arial' -+ font_name = 'Modern No.20' - else: - font_name = 'Arial' - diff --git a/tests/06nested/[result]/.hgignore b/tests/06nested/[result]/.hgignore deleted file mode 100644 index b306c6a..0000000 --- a/tests/06nested/[result]/.hgignore +++ /dev/null @@ -1,12 +0,0 @@ -syntax: glob -*.pyc -*.pyo -build -pyglet.*.log -*.orig -.DS_Store -_htmlapi -doc/api -doc/_build -doc/internal/build.rst -website/dist diff --git a/tests/06nested/[result]/examples/font_comparison.py b/tests/06nested/[result]/examples/font_comparison.py deleted file mode 100644 index 7e968c5..0000000 --- a/tests/06nested/[result]/examples/font_comparison.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python -# ---------------------------------------------------------------------------- -# pyglet -# Copyright (c) 2006-2008 Alex Holkner -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# * Neither the name of pyglet nor the names of its -# contributors may be used to endorse or promote products -# derived from this software without specific prior written -# permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# ---------------------------------------------------------------------------- - -'''A simple tool that may be used to compare font faces. - -Use the left/right cursor keys to change font faces. -''' - -__docformat__ = 'restructuredtext' -__version__ = '$Id: $' -import pyglet - -FONTS = ['Andale Mono', 'Consolas', 'Inconsolata', 'Inconsolata-dz', 'Monaco', - 'Menlo'] - -SAMPLE = '''\ -class Spam(object): - def __init__(self): - # The quick brown fox - self.spam = {"jumped": 'over'} - @the - def lazy(self, *dog): - self.dog = [lazy, lazy]''' - -class Window(pyglet.window.Window): - font_num = 0 - def on_text_motion(self, motion): - if motion == pyglet.window.key.MOTION_RIGHT: - self.font_num += 1 - if self.font_num == len(FONTS): - self.font_num = 0 - elif motion == pyglet.window.key.MOTION_LEFT: - self.font_num -= 1 - if self.font_num < 0: - self.font_num = len(FONTS) - 1 - - face = FONTS[self.font_num] - self.head = pyglet.text.Label(face, font_size=24, y=0, - anchor_y='bottom') - self.text = pyglet.text.Label(SAMPLE, font_name=face, font_size=18, - y=self.height, anchor_y='top', width=self.width, multiline=True) - - def on_draw(self): - self.clear() - self.head.draw() - self.text.draw() - -window = Window() -window.on_text_motion(None) -pyglet.app.run() diff --git a/tests/06nested/[result]/experimental/console.py b/tests/06nested/[result]/experimental/console.py deleted file mode 100644 index fd80e6b..0000000 --- a/tests/06nested/[result]/experimental/console.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python - -''' -''' - -__docformat__ = 'restructuredtext' -__version__ = '$Id$' - -import code -import sys -import traceback - -import pyglet.event -import pyglet.text -from pyglet.window import key - -from pyglet.gl import * - -class Console(object): - def __init__(self, width, height, globals=None, locals=None): - self.font = pyglet.font.load('bitstream vera sans mono', 12) - self.lines = [] - self.buffer = '' - self.pre_buffer = '' - self.prompt = '>>> ' - self.prompt2 = '... 
' - self.globals = globals - self.locals = locals - self.write_pending = '' - - self.width, self.height = (width, height) - self.max_lines = self.height / (self.font.ascent - self.font.descent) - 1 - - self.write('pyglet command console\n') - self.write('Version %s\n' % __version__) - - def on_key_press(self, symbol, modifiers): - # TODO cursor control / line editing - if modifiers & key.key.MOD_CTRL and symbol == key.key.C: - self.buffer = '' - self.pre_buffer = '' - return - if symbol == key.key.ENTER: - self.write('%s%s\n' % (self.get_prompt(), self.buffer)) - self.execute(self.pre_buffer + self.buffer) - self.buffer = '' - return - if symbol == key.key.BACKSPACE: - self.buffer = self.buffer[:-1] - return - return EVENT_UNHANDLED - - def on_text(self, text): - if ' ' <= text <= '~': - self.buffer += text - if 0xae <= ord(text) <= 0xff: - self.buffer += text - - def write(self, text): - if self.write_pending: - text = self.write_pending + text - self.write_pending = '' - - if type(text) in (str, unicode): - text = text.split('\n') - - if text[-1] != '': - self.write_pending = text[-1] - del text[-1] - - self.lines = [pyglet.text.layout_text(line.strip(), font=self.font) - for line in text] + self.lines - - if len(self.lines) > self.max_lines: - del self.lines[-1] - - def execute(self, input): - old_stderr, old_stdout = sys.stderr, sys.stdout - sys.stderr = sys.stdout = self - try: - c = code.compile_command(input, '') - if c is None: - self.pre_buffer = '%s\n' % input - else: - self.pre_buffer = '' - result = eval(c, self.globals, self.locals) - if result is not None: - self.write('%r\n' % result) - except: - traceback.print_exc() - self.pre_buffer = '' - sys.stderr = old_stderr - sys.stdout = old_stdout - - def get_prompt(self): - if self.pre_buffer: - return self.prompt2 - return self.prompt - - __last = None - def draw(self): - pyglet.text.begin() - glPushMatrix() - glTranslatef(0, self.height, 0) - for line in self.lines[::-1]: - line.draw() - glTranslatef(0, -self.font.glyph_height, 0) - line = self.get_prompt() + self.buffer - if self.__last is None or line != self.__last[0]: - self.__last = (line, pyglet.text.layout_text(line.strip(), - font=self.font)) - self.__last[1].draw() - glPopMatrix() - - pyglet.text.end() - -if __name__ == '__main__': - from pyglet.window import * - from pyglet.window.event import * - from pyglet import clock - w1 = Window(width=600, height=400) - console = Console(w1.width, w1.height) - - w1.push_handlers(console) - - c = clock.Clock() - - glMatrixMode(GL_PROJECTION) - glLoadIdentity() - glOrtho(0, w1.width, 0, w1.height, -1, 1) - glEnable(GL_COLOR_MATERIAL) - - glMatrixMode(GL_MODELVIEW) - glClearColor(1, 1, 1, 1) - while not w1.has_exit: - c.set_fps(60) - w1.dispatch_events() - glClear(GL_COLOR_BUFFER_BIT) - console.draw() - w1.flip() - diff --git a/tests/06nested/[result]/pyglet/font/win32.py b/tests/06nested/[result]/pyglet/font/win32.py deleted file mode 100644 index d3f2c8e..0000000 --- a/tests/06nested/[result]/pyglet/font/win32.py +++ /dev/null @@ -1,273 +0,0 @@ -# ---------------------------------------------------------------------------- -# pyglet -# Copyright (c) 2006-2008 Alex Holkner -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# * Neither the name of pyglet nor the names of its -# contributors may be used to endorse or promote products -# derived from this software without specific prior written -# permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# ---------------------------------------------------------------------------- - -''' -''' - -# TODO Windows Vista: need to call SetProcessDPIAware? May affect GDI+ calls -# as well as font. - -from ctypes import * -import ctypes -import math - -from sys import byteorder -import pyglet -from pyglet.font import base -from pyglet.font import win32query -import pyglet.image -from pyglet.libs.win32.constants import * -from pyglet.libs.win32.types import * -from pyglet.libs.win32 import _gdi32 as gdi32, _user32 as user32 -from pyglet.libs.win32 import _kernel32 as kernel32 -from pyglet.compat import asbytes - -_debug_font = pyglet.options['debug_font'] - - -def str_ucs2(text): - if byteorder == 'big': - text = text.encode('utf_16_be') - else: - text = text.encode('utf_16_le') # explicit endian avoids BOM - return create_string_buffer(text + '\0') - -_debug_dir = 'debug_font' -def _debug_filename(base, extension): - import os - if not os.path.exists(_debug_dir): - os.makedirs(_debug_dir) - name = '%s-%%d.%%s' % os.path.join(_debug_dir, base) - num = 1 - while os.path.exists(name % (num, extension)): - num += 1 - return name % (num, extension) - -def _debug_image(image, name): - filename = _debug_filename(name, 'png') - image.save(filename) - _debug('Saved image %r to %s' % (image, filename)) - -_debug_logfile = None -def _debug(msg): - global _debug_logfile - if not _debug_logfile: - _debug_logfile = open(_debug_filename('log', 'txt'), 'wt') - _debug_logfile.write(msg + '\n') - -class Win32GlyphRenderer(base.GlyphRenderer): - _bitmap = None - _dc = None - _bitmap_rect = None - - def __init__(self, font): - super(Win32GlyphRenderer, self).__init__(font) - self.font = font - - # Pessimistically round up width and height to 4 byte alignment - width = font.max_glyph_width - height = font.ascent - font.descent - width = (width | 0x3) + 1 - height = (height | 0x3) + 1 - self._create_bitmap(width, height) - - gdi32.SelectObject(self._dc, self.font.hfont) - - def _create_bitmap(self, width, height): - pass - - def render(self, text): - raise NotImplementedError('abstract') - -class GDIGlyphRenderer(Win32GlyphRenderer): - def __del__(self): - try: - if self._dc: - gdi32.DeleteDC(self._dc) - if self._bitmap: - gdi32.DeleteObject(self._bitmap) - except: - pass - - def render(self, text): - # Attempt to get ABC widths (only for 
TrueType) - abc = ABC() - if gdi32.GetCharABCWidthsW(self._dc, - ord(text), ord(text), byref(abc)): - width = abc.abcB - lsb = abc.abcA - advance = abc.abcA + abc.abcB + abc.abcC - else: - width_buf = c_int() - gdi32.GetCharWidth32W(self._dc, - ord(text), ord(text), byref(width_buf)) - width = width_buf.value - lsb = 0 - advance = width - - # Can't get glyph-specific dimensions, use whole line-height. - height = self._bitmap_height - image = self._get_image(text, width, height, lsb) - - glyph = self.font.create_glyph(image) - glyph.set_bearings(-self.font.descent, lsb, advance) - - if _debug_font: - _debug('%r.render(%s)' % (self, text)) - _debug('abc.abcA = %r' % abc.abcA) - _debug('abc.abcB = %r' % abc.abcB) - _debug('abc.abcC = %r' % abc.abcC) - _debug('width = %r' % width) - _debug('height = %r' % height) - _debug('lsb = %r' % lsb) - _debug('advance = %r' % advance) - _debug_image(image, 'glyph_%s' % text) - _debug_image(self.font.textures[0], 'tex_%s' % text) - - return glyph - - def _get_image(self, text, width, height, lsb): - # There's no such thing as a greyscale bitmap format in GDI. We can - # create an 8-bit palette bitmap with 256 shades of grey, but - # unfortunately antialiasing will not work on such a bitmap. So, we - # use a 32-bit bitmap and use the red channel as OpenGL's alpha. - - gdi32.SelectObject(self._dc, self._bitmap) - gdi32.SelectObject(self._dc, self.font.hfont) - gdi32.SetBkColor(self._dc, 0x0) - gdi32.SetTextColor(self._dc, 0x00ffffff) - gdi32.SetBkMode(self._dc, OPAQUE) - - # Draw to DC - user32.FillRect(self._dc, byref(self._bitmap_rect), self._black) - gdi32.ExtTextOutA(self._dc, -lsb, 0, 0, None, text, - len(text), None) - gdi32.GdiFlush() - - # Create glyph object and copy bitmap data to texture - image = pyglet.image.ImageData(width, height, - 'AXXX', self._bitmap_data, self._bitmap_rect.right * 4) - return image - - def _create_bitmap(self, width, height): - self._black = gdi32.GetStockObject(BLACK_BRUSH) - self._white = gdi32.GetStockObject(WHITE_BRUSH) - - if self._dc: - gdi32.ReleaseDC(self._dc) - if self._bitmap: - gdi32.DeleteObject(self._bitmap) - - pitch = width * 4 - data = POINTER(c_byte * (height * pitch))() - info = BITMAPINFO() - info.bmiHeader.biSize = sizeof(info.bmiHeader) - info.bmiHeader.biWidth = width - info.bmiHeader.biHeight = height - info.bmiHeader.biPlanes = 1 - info.bmiHeader.biBitCount = 32 - info.bmiHeader.biCompression = BI_RGB - - self._dc = gdi32.CreateCompatibleDC(None) - self._bitmap = gdi32.CreateDIBSection(None, - byref(info), DIB_RGB_COLORS, byref(data), None, - 0) - # Spookiness: the above line causes a "not enough storage" error, - # even though that error cannot be generated according to docs, - # and everything works fine anyway. Call SetLastError to clear it. 
- kernel32.SetLastError(0) - - self._bitmap_data = data.contents - self._bitmap_rect = RECT() - self._bitmap_rect.left = 0 - self._bitmap_rect.right = width - self._bitmap_rect.top = 0 - self._bitmap_rect.bottom = height - self._bitmap_height = height - - if _debug_font: - _debug('%r._create_dc(%d, %d)' % (self, width, height)) - _debug('_dc = %r' % self._dc) - _debug('_bitmap = %r' % self._bitmap) - _debug('pitch = %r' % pitch) - _debug('info.bmiHeader.biSize = %r' % info.bmiHeader.biSize) - -class Win32Font(base.Font): - glyph_renderer_class = GDIGlyphRenderer - - def __init__(self, name, size, bold=False, italic=False, dpi=None): - super(Win32Font, self).__init__() - - self.logfont = self.get_logfont(name, size, bold, italic, dpi) - self.hfont = gdi32.CreateFontIndirectA(byref(self.logfont)) - - # Create a dummy DC for coordinate mapping - dc = user32.GetDC(0) - metrics = TEXTMETRIC() - gdi32.SelectObject(dc, self.hfont) - gdi32.GetTextMetricsA(dc, byref(metrics)) - self.ascent = metrics.tmAscent - self.descent = -metrics.tmDescent - self.max_glyph_width = metrics.tmMaxCharWidth - - @staticmethod - def get_logfont(name, size, bold, italic, dpi): - # Create a dummy DC for coordinate mapping - dc = user32.GetDC(0) - if dpi is None: - dpi = 96 - logpixelsy = dpi - - logfont = LOGFONT() - # Conversion of point size to device pixels - logfont.lfHeight = int(-size * logpixelsy // 72) - if bold: - logfont.lfWeight = FW_BOLD - else: - logfont.lfWeight = FW_NORMAL - logfont.lfItalic = italic - logfont.lfFaceName = asbytes(name) - logfont.lfQuality = ANTIALIASED_QUALITY - return logfont - - @classmethod - def have_font(cls, name): - return win32query.have_font(name) - - @classmethod - def add_font_data(cls, data): - numfonts = c_uint32() - gdi32.AddFontMemResourceEx(data, len(data), 0, byref(numfonts)) - -# ... \ No newline at end of file diff --git a/tests/06nested/[result]/tests/app/EVENT_LOOP.py b/tests/06nested/[result]/tests/app/EVENT_LOOP.py deleted file mode 100644 index 28c7e9e..0000000 --- a/tests/06nested/[result]/tests/app/EVENT_LOOP.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python -'''Test that the event loop can do timing. - -The test will display a series of intervals, iterations and sleep times. -It should then display an incrementing number up to 2x the number of -iterations, at a rate determined by the interval. -''' - -import sys -import unittest - -import pyglet - -__noninteractive = True - -if sys.platform in ('win32', 'cygwin'): - from time import clock as time -else: - from time import time -from time import sleep - -class EVENT_LOOP(unittest.TestCase): - def t_scheduled(self, interval, iterations, sleep_time=0): - print('Test interval=%s, iterations=%s, sleep=%s' % (interval, - iterations, sleep_time)) - warmup_iterations = iterations - - self.last_t = 0. 
- self.timer_count = 0 - def f(dt): - sys.stdout.write('%s\r' % self.timer_count) - sys.stdout.flush() - t = time() - self.timer_count += 1 - tc = self.timer_count - if tc > warmup_iterations: - self.assertAlmostEqual(dt, interval, places=2) - self.assertAlmostEqual(t - self.last_t, interval, places=2) - self.last_t = t - - if self.timer_count > iterations + warmup_iterations: - pyglet.app.exit() - if sleep_time: - sleep(sleep_time) - - pyglet.clock.schedule_interval(f, interval) - try: - pyglet.app.run() - finally: - pyglet.clock.unschedule(f) - print - - def test_1_5(self): - self.t_scheduled(1, 5, 0) - - def test_1_5_d5(self): - self.t_scheduled(1, 5, 0.5) - - def test_d1_50(self): - self.t_scheduled(.1, 50) - - def test_d1_50_d05(self): - self.t_scheduled(.1, 50, 0.05) - - def test_d05_50(self): - self.t_scheduled(.05, 50) - - def test_d05_50_d03(self): - self.t_scheduled(.05, 50, 0.03) - - def test_d02_50(self): - self.t_scheduled(.02, 50) - - def test_d01_50(self): - self.t_scheduled(.01, 50) - -if __name__ == '__main__': - if pyglet.version != '1.2dev': - print('Wrong version of pyglet imported; please check your PYTHONPATH') - else: - unittest.main() diff --git a/tests/06nested/[result]/tests/font/SYSTEM.py b/tests/06nested/[result]/tests/font/SYSTEM.py deleted file mode 100644 index d9634b1..0000000 --- a/tests/06nested/[result]/tests/font/SYSTEM.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -'''Test that a font likely to be installed on the computer can be -loaded and displayed correctly. - -One window will open, it should show "Quickly brown fox" at 24pt using: - - * "Helvetica" on Mac OS X - * "Arial" on Windows - -''' - -__docformat__ = 'restructuredtext' -__version__ = '$Id: $' - -import unittest -import sys -from . import base_text - -if sys.platform == 'darwin': - font_name = 'Helvetica' -elif sys.platform in ('win32', 'cygwin'): - font_name = 'Modern No.20' -else: - font_name = 'Arial' - -class TEST_SYSTEM(base_text.TextTestBase): - font_name = font_name - font_size = 24 - -if __name__ == '__main__': - unittest.main() diff --git a/tests/06nested/examples/font_comparison.py b/tests/06nested/examples/font_comparison.py deleted file mode 100644 index 1327a45..0000000 --- a/tests/06nested/examples/font_comparison.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python -# ---------------------------------------------------------------------------- -# pyglet -# Copyright (c) 2006-2008 Alex Holkner -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# * Neither the name of pyglet nor the names of its -# contributors may be used to endorse or promote products -# derived from this software without specific prior written -# permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# ---------------------------------------------------------------------------- - -'''A simple tool that may be used to compare font faces. - -Use the left/right cursor keys to change font faces. -''' - -__docformat__ = 'restructuredtext' -__version__ = '$Id: $' -import pyglet - -FONTS = ['Andale Mono', 'Consolas', 'Inconsolata', 'Inconsolata-dz', 'Monaco', - 'Menlo'] - -SAMPLE = '''class Spam(object): - def __init__(self): - # The quick brown fox - self.spam = {"jumped": 'over'} - @the - def lazy(self, *dog): - self.dog = [lazy, lazy]''' - -class Window(pyglet.window.Window): - font_num = 0 - def on_text_motion(self, motion): - if motion == pyglet.window.key.MOTION_RIGHT: - self.font_num += 1 - if self.font_num == len(FONTS): - self.font_num = 0 - elif motion == pyglet.window.key.MOTION_LEFT: - self.font_num -= 1 - if self.font_num < 0: - self.font_num = len(FONTS) - 1 - - face = FONTS[self.font_num] - self.head = pyglet.text.Label(face, font_size=24, y=0, - anchor_y='bottom') - self.text = pyglet.text.Label(SAMPLE, font_name=face, font_size=18, - y=self.height, anchor_y='top', width=self.width, multiline=True) - - def on_draw(self): - self.clear() - self.head.draw() - self.text.draw() - -window = Window() -window.on_text_motion(None) -pyglet.app.run() diff --git a/tests/06nested/experimental/console.py b/tests/06nested/experimental/console.py deleted file mode 100644 index 3371bf3..0000000 --- a/tests/06nested/experimental/console.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python - -''' -''' - -__docformat__ = 'restructuredtext' -__version__ = '$Id$' - -import code -import sys -import traceback - -import pyglet.event -import pyglet.text -from pyglet.window import key - -from pyglet.gl import * - -class Console(object): - def __init__(self, width, height, globals=None, locals=None): - self.font = pyglet.text.default_font_factory.get_font('bitstream vera sans mono', 12) - self.lines = [] - self.buffer = '' - self.pre_buffer = '' - self.prompt = '>>> ' - self.prompt2 = '... 
' - self.globals = globals - self.locals = locals - self.write_pending = '' - - self.width, self.height = (width, height) - self.max_lines = self.height / self.font.glyph_height - 1 - - self.write('pyglet command console\n') - self.write('Version %s\n' % __version__) - - def on_key_press(self, symbol, modifiers): - # TODO cursor control / line editing - if modifiers & key.key.MOD_CTRL and symbol == key.key.C: - self.buffer = '' - self.pre_buffer = '' - return - if symbol == key.key.ENTER: - self.write('%s%s\n' % (self.get_prompt(), self.buffer)) - self.execute(self.pre_buffer + self.buffer) - self.buffer = '' - return - if symbol == key.key.BACKSPACE: - self.buffer = self.buffer[:-1] - return - return EVENT_UNHANDLED - - def on_text(self, text): - if ' ' <= text <= '~': - self.buffer += text - if 0xae <= ord(text) <= 0xff: - self.buffer += text - - def write(self, text): - if self.write_pending: - text = self.write_pending + text - self.write_pending = '' - - if type(text) in (str, unicode): - text = text.split('\n') - - if text[-1] != '': - self.write_pending = text[-1] - del text[-1] - - self.lines = [pyglet.text.layout_text(line.strip(), font=self.font) - for line in text] + self.lines - - if len(self.lines) > self.max_lines: - del self.lines[-1] - - def execute(self, input): - old_stderr, old_stdout = sys.stderr, sys.stdout - sys.stderr = sys.stdout = self - try: - c = code.compile_command(input, '') - if c is None: - self.pre_buffer = '%s\n' % input - else: - self.pre_buffer = '' - result = eval(c, self.globals, self.locals) - if result is not None: - self.write('%r\n' % result) - except: - traceback.print_exc() - self.pre_buffer = '' - sys.stderr = old_stderr - sys.stdout = old_stdout - - def get_prompt(self): - if self.pre_buffer: - return self.prompt2 - return self.prompt - - __last = None - def draw(self): - pyglet.text.begin() - glPushMatrix() - glTranslatef(0, self.height, 0) - for line in self.lines[::-1]: - line.draw() - glTranslatef(0, -self.font.glyph_height, 0) - line = self.get_prompt() + self.buffer - if self.__last is None or line != self.__last[0]: - self.__last = (line, pyglet.text.layout_text(line.strip(), - font=self.font)) - self.__last[1].draw() - glPopMatrix() - - pyglet.text.end() - -if __name__ == '__main__': - from pyglet.window import * - from pyglet.window.event import * - from pyglet import clock - w1 = Window(width=600, height=400) - console = Console(w1.width, w1.height) - - w1.push_handlers(console) - - c = clock.Clock() - - glMatrixMode(GL_PROJECTION) - glLoadIdentity() - glOrtho(0, w1.width, 0, w1.height, -1, 1) - glEnable(GL_COLOR_MATERIAL) - - glMatrixMode(GL_MODELVIEW) - glClearColor(1, 1, 1, 1) - while not w1.has_exit: - c.set_fps(60) - w1.dispatch_events() - glClear(GL_COLOR_BUFFER_BIT) - console.draw() - w1.flip() - diff --git a/tests/06nested/pyglet/font/win32.py b/tests/06nested/pyglet/font/win32.py deleted file mode 100644 index 99cd339..0000000 --- a/tests/06nested/pyglet/font/win32.py +++ /dev/null @@ -1,274 +0,0 @@ -# ---------------------------------------------------------------------------- -# pyglet -# Copyright (c) 2006-2008 Alex Holkner -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# * Neither the name of pyglet nor the names of its -# contributors may be used to endorse or promote products -# derived from this software without specific prior written -# permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# ---------------------------------------------------------------------------- - -''' -''' - -# TODO Windows Vista: need to call SetProcessDPIAware? May affect GDI+ calls -# as well as font. - -from ctypes import * -import ctypes -import math - -from sys import byteorder -import pyglet -from pyglet.font import base -import pyglet.image -from pyglet.libs.win32.constants import * -from pyglet.libs.win32.types import * -from pyglet.libs.win32 import _gdi32 as gdi32, _user32 as user32 -from pyglet.libs.win32 import _kernel32 as kernel32 -from pyglet.compat import asbytes - -_debug_font = pyglet.options['debug_font'] - - -def str_ucs2(text): - if byteorder == 'big': - text = text.encode('utf_16_be') - else: - text = text.encode('utf_16_le') # explicit endian avoids BOM - return create_string_buffer(text + '\0') - -_debug_dir = 'debug_font' -def _debug_filename(base, extension): - import os - if not os.path.exists(_debug_dir): - os.makedirs(_debug_dir) - name = '%s-%%d.%%s' % os.path.join(_debug_dir, base) - num = 1 - while os.path.exists(name % (num, extension)): - num += 1 - return name % (num, extension) - -def _debug_image(image, name): - filename = _debug_filename(name, 'png') - image.save(filename) - _debug('Saved image %r to %s' % (image, filename)) - -_debug_logfile = None -def _debug(msg): - global _debug_logfile - if not _debug_logfile: - _debug_logfile = open(_debug_filename('log', 'txt'), 'wt') - _debug_logfile.write(msg + '\n') - -class Win32GlyphRenderer(base.GlyphRenderer): - _bitmap = None - _dc = None - _bitmap_rect = None - - def __init__(self, font): - super(Win32GlyphRenderer, self).__init__(font) - self.font = font - - # Pessimistically round up width and height to 4 byte alignment - width = font.max_glyph_width - height = font.ascent - font.descent - width = (width | 0x3) + 1 - height = (height | 0x3) + 1 - self._create_bitmap(width, height) - - gdi32.SelectObject(self._dc, self.font.hfont) - - def _create_bitmap(self, width, height): - pass - - def render(self, text): - raise NotImplementedError('abstract') - -class GDIGlyphRenderer(Win32GlyphRenderer): - def __del__(self): - try: - if self._dc: - gdi32.DeleteDC(self._dc) - if self._bitmap: - gdi32.DeleteObject(self._bitmap) - except: - pass - - def render(self, text): - # Attempt to get ABC widths (only for TrueType) - abc = ABC() - if 
gdi32.GetCharABCWidthsW(self._dc, - ord(text), ord(text), byref(abc)): - width = abc.abcB - lsb = abc.abcA - advance = abc.abcA + abc.abcB + abc.abcC - else: - width_buf = c_int() - gdi32.GetCharWidth32W(self._dc, - ord(text), ord(text), byref(width_buf)) - width = width_buf.value - lsb = 0 - advance = width - - # Can't get glyph-specific dimensions, use whole line-height. - height = self._bitmap_height - image = self._get_image(text, width, height, lsb) - - glyph = self.font.create_glyph(image) - glyph.set_bearings(-self.font.descent, lsb, advance) - - if _debug_font: - _debug('%r.render(%s)' % (self, text)) - _debug('abc.abcA = %r' % abc.abcA) - _debug('abc.abcB = %r' % abc.abcB) - _debug('abc.abcC = %r' % abc.abcC) - _debug('width = %r' % width) - _debug('height = %r' % height) - _debug('lsb = %r' % lsb) - _debug('advance = %r' % advance) - _debug_image(image, 'glyph_%s' % text) - _debug_image(self.font.textures[0], 'tex_%s' % text) - - return glyph - - def _get_image(self, text, width, height, lsb): - # There's no such thing as a greyscale bitmap format in GDI. We can - # create an 8-bit palette bitmap with 256 shades of grey, but - # unfortunately antialiasing will not work on such a bitmap. So, we - # use a 32-bit bitmap and use the red channel as OpenGL's alpha. - - gdi32.SelectObject(self._dc, self._bitmap) - gdi32.SelectObject(self._dc, self.font.hfont) - gdi32.SetBkColor(self._dc, 0x0) - gdi32.SetTextColor(self._dc, 0x00ffffff) - gdi32.SetBkMode(self._dc, OPAQUE) - - # Draw to DC - user32.FillRect(self._dc, byref(self._bitmap_rect), self._black) - gdi32.ExtTextOutA(self._dc, -lsb, 0, 0, None, text, - len(text), None) - gdi32.GdiFlush() - - # Create glyph object and copy bitmap data to texture - image = pyglet.image.ImageData(width, height, - 'AXXX', self._bitmap_data, self._bitmap_rect.right * 4) - return image - - def _create_bitmap(self, width, height): - self._black = gdi32.GetStockObject(BLACK_BRUSH) - self._white = gdi32.GetStockObject(WHITE_BRUSH) - - if self._dc: - gdi32.ReleaseDC(self._dc) - if self._bitmap: - gdi32.DeleteObject(self._bitmap) - - pitch = width * 4 - data = POINTER(c_byte * (height * pitch))() - info = BITMAPINFO() - info.bmiHeader.biSize = sizeof(info.bmiHeader) - info.bmiHeader.biWidth = width - info.bmiHeader.biHeight = height - info.bmiHeader.biPlanes = 1 - info.bmiHeader.biBitCount = 32 - info.bmiHeader.biCompression = BI_RGB - - self._dc = gdi32.CreateCompatibleDC(None) - self._bitmap = gdi32.CreateDIBSection(None, - byref(info), DIB_RGB_COLORS, byref(data), None, - 0) - # Spookiness: the above line causes a "not enough storage" error, - # even though that error cannot be generated according to docs, - # and everything works fine anyway. Call SetLastError to clear it. 
- kernel32.SetLastError(0) - - self._bitmap_data = data.contents - self._bitmap_rect = RECT() - self._bitmap_rect.left = 0 - self._bitmap_rect.right = width - self._bitmap_rect.top = 0 - self._bitmap_rect.bottom = height - self._bitmap_height = height - - if _debug_font: - _debug('%r._create_dc(%d, %d)' % (self, width, height)) - _debug('_dc = %r' % self._dc) - _debug('_bitmap = %r' % self._bitmap) - _debug('pitch = %r' % pitch) - _debug('info.bmiHeader.biSize = %r' % info.bmiHeader.biSize) - -class Win32Font(base.Font): - glyph_renderer_class = GDIGlyphRenderer - - def __init__(self, name, size, bold=False, italic=False, dpi=None): - super(Win32Font, self).__init__() - - self.logfont = self.get_logfont(name, size, bold, italic, dpi) - self.hfont = gdi32.CreateFontIndirectA(byref(self.logfont)) - - # Create a dummy DC for coordinate mapping - dc = user32.GetDC(0) - metrics = TEXTMETRIC() - gdi32.SelectObject(dc, self.hfont) - gdi32.GetTextMetricsA(dc, byref(metrics)) - self.ascent = metrics.tmAscent - self.descent = -metrics.tmDescent - self.max_glyph_width = metrics.tmMaxCharWidth - - @staticmethod - def get_logfont(name, size, bold, italic, dpi): - # Create a dummy DC for coordinate mapping - dc = user32.GetDC(0) - if dpi is None: - dpi = 96 - logpixelsy = dpi - - logfont = LOGFONT() - # Conversion of point size to device pixels - logfont.lfHeight = int(-size * logpixelsy // 72) - if bold: - logfont.lfWeight = FW_BOLD - else: - logfont.lfWeight = FW_NORMAL - logfont.lfItalic = italic - logfont.lfFaceName = asbytes(name) - logfont.lfQuality = ANTIALIASED_QUALITY - return logfont - - @classmethod - def have_font(cls, name): - # CreateFontIndirect always returns a font... have to work out - # something with EnumFontFamily... TODO - return True - - @classmethod - def add_font_data(cls, data): - numfonts = c_uint32() - gdi32.AddFontMemResourceEx(data, len(data), 0, byref(numfonts)) - -# ... \ No newline at end of file diff --git a/tests/06nested/tests/app/EVENT_LOOP.py b/tests/06nested/tests/app/EVENT_LOOP.py deleted file mode 100644 index 782de99..0000000 --- a/tests/06nested/tests/app/EVENT_LOOP.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python -'''Test that the event loop can do timing. - -The test will display a series of intervals, iterations and sleep times. -It should then display an incrementing number up to 2x the number of -iterations, at a rate determined by the interval. -''' - -import sys -import unittest - -import pyglet - -__noninteractive = True - -if sys.platform in ('win32', 'cygwin'): - from time import clock as time -else: - from time import time -from time import sleep - -class EVENT_LOOP(unittest.TestCase): - def t_scheduled(self, interval, iterations, sleep_time=0): - print 'Test interval=%s, iterations=%s, sleep=%s' % (interval, - iterations, sleep_time) - warmup_iterations = iterations - - self.last_t = 0. 
- self.timer_count = 0 - def f(dt): - sys.stdout.write('%s\r' % self.timer_count) - sys.stdout.flush() - t = time() - self.timer_count += 1 - tc = self.timer_count - if tc > warmup_iterations: - self.assertAlmostEqual(dt, interval, places=2) - self.assertAlmostEqual(t - self.last_t, interval, places=2) - self.last_t = t - - if self.timer_count > iterations + warmup_iterations: - pyglet.app.exit() - if sleep_time: - sleep(sleep_time) - - pyglet.clock.schedule_interval(f, interval) - try: - pyglet.app.run() - finally: - pyglet.clock.unschedule(f) - print - - def test_1_5(self): - self.t_scheduled(1, 5, 0) - - def test_1_5_d5(self): - self.t_scheduled(1, 5, 0.5) - - def test_d1_50(self): - self.t_scheduled(.1, 50) - - def test_d1_50_d05(self): - self.t_scheduled(.1, 50, 0.05) - - def test_d05_50(self): - self.t_scheduled(.05, 50) - - def test_d05_50_d03(self): - self.t_scheduled(.05, 50, 0.03) - - def test_d02_50(self): - self.t_scheduled(.02, 50) - - def test_d01_50(self): - self.t_scheduled(.01, 50) - -if __name__ == '__main__': - if pyglet.version != '1.2dev': - print 'Wrong version of pyglet imported; please check your PYTHONPATH' - else: - unittest.main() diff --git a/tests/06nested/tests/font/SYSTEM.py b/tests/06nested/tests/font/SYSTEM.py deleted file mode 100644 index 4ecdebd..0000000 --- a/tests/06nested/tests/font/SYSTEM.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -'''Test that a font likely to be installed on the computer can be -loaded and displayed correctly. - -One window will open, it should show "Quickly brown fox" at 24pt using: - - * "Helvetica" on Mac OS X - * "Arial" on Windows - -''' - -__docformat__ = 'restructuredtext' -__version__ = '$Id: $' - -import unittest -import sys -from . import base_text - -if sys.platform == 'darwin': - font_name = 'Helvetica' -elif sys.platform in ('win32', 'cygwin'): - font_name = 'Arial' -else: - font_name = 'Arial' - -class TEST_SYSTEM(base_text.TextTestBase): - font_name = font_name - font_size = 24 - -if __name__ == '__main__': - unittest.main() diff --git a/tests/07google_code_wiki.from b/tests/07google_code_wiki.from deleted file mode 100644 index e86fe6f..0000000 --- a/tests/07google_code_wiki.from +++ /dev/null @@ -1,87 +0,0 @@ -#summary Plugin development -#sidebar TOC - - -!!WARNING!! The table below displays the navigation bar -!!WARNING!! Please don't touch it unless you know what you're doing. - - - - - - - - - - - - - - - - - -
http://wiki.spyderlib.googlecode.com/hg/Buttons/logo-mini.png
[http://code.google.com/p/spyderlib http://wiki.spyderlib.googlecode.com/hg/Buttons/home.png][http://code.google.com/p/spyderlib/downloads/list http://wiki.spyderlib.googlecode.com/hg/Buttons/download.png][http://code.google.com/p/spyderlib/wiki/Features http://wiki.spyderlib.googlecode.com/hg/Buttons/features.png][http://code.google.com/p/spyderlib/wiki/Screenshots http://wiki.spyderlib.googlecode.com/hg/Buttons/screenshots.png][http://packages.python.org/spyder/ http://wiki.spyderlib.googlecode.com/hg/Buttons/docs.png][http://code.google.com/p/spyderlib/wiki/Roadmap http://wiki.spyderlib.googlecode.com/hg/Buttons/roadmap.png][http://code.google.com/p/spyderlib/wiki/Support http://wiki.spyderlib.googlecode.com/hg/Buttons/support.png][http://code.google.com/p/spyderlib/wiki/Development http://wiki.spyderlib.googlecode.com/hg/Buttons/development_sel.png][http://spyder-ide.blogspot.com http://wiki.spyderlib.googlecode.com/hg/Buttons/blog.png]
- -= Spyder plugin development = ----- - -== Introduction == - -Spyder plugins are just importable Python files that use internal APIs to do different stuff in IDE. Spyder goal is to be reusable set of components and widgets, so a lot of stuff inside is made with plugins. For example, code editor and python console are plugins. Object inspector, history browser and so on. You can see them [https://code.google.com/p/spyderlib/source/browse/#hg%2Fspyderlib%2Fplugins here]. - -There are two type of plugins in Spyder: - * `core plugins` - these are standard Spyder components located in `spyderlib.plugins` module. They are explicitly imported like usual Python modules. - * `external plugins` - are discovered and imported dynamically, so they should follow naming convention so that Spyder can find them. External plugins are also initialized later all at once when some of the `core` set are already loaded `[reference need]`. - -Both plugin types should include class that inherits from the same base plugin class. - - - [ ] check if plugins can be enabled/disabled - [ ] list of all lookup dirs - [ ] how to debug plugin discovery - [ ] API reference, version information - [ ] time when core plugins are initialized - [ ] time when external plugins are initialized - -== Architecture == - -Spyder main window is de-facto storage for all Spyder state. - - - - -== Plugin discovery == - -`[more research is needed - ask techtonik@gmail.com if you feel capable]` - -While many software use directories to distribute and find plugins, Spyder uses Python modules. It's own plugin are located under `spyderlib` namespace with all Spyder application. It can be used as a library. External plugins are expected to be found in a special importable module named. - - * _spyderplugins_: this is what this page is about - -The module _spyderplugins_ includes third-party plugins of several kinds. -TODO: check if prefix for plugin kind really affects loading or if it just a convention - -In Spyder v1.1, the only kind of plugin supported were the input/output plugins (modules starting with `io_`) which provide I/O functions for the variable explorer (_Workspace_ and _Globals explorer_ in v1.1, _Variable explorer_ in v2.0). Spyder natively supports .mat, .npy, .txt, .csv, .jpg, .png and .tiff file formats. These plugins allow users to add their own types, like HDF5 files for example. - -In Spyder v2.0, any kind of plugin may be created in the module _spyderplugins_. These third-party plugins may communicate with other Spyder components through the plugin interface (see [http://code.google.com/p/spyderlib/source/browse/spyderlib/plugins/__init__.py spyderlib/plugins/__init__.py]). - -== I/O Spyder plugins == - -How to create your own I/O Spyder plugins: - * Create a Python module named `io_foobar.py` where `foobar` is the name of your plugin - * Write your loading function and/or your saving function: - * load: - * input is a string: filename - * output is a tuple with two elements: dictionary containing at least one element (key is the variable name) and error message (or `None`) - * save: - * input is a tuple with two elements: data (dictionary), filename - * output is a string or `None`: error message or `None` if no error occured - * Define the global variables `FORMAT_NAME`, `FORMAT_EXT`, `FORMAT_LOAD` and `FORMAT_SAVE`. 
See the example of DICOM images support: - http://code.google.com/p/spyderlib/source/browse/spyderplugins/io_dicom.py - * More examples of load/save functions may be found here: - http://code.google.com/p/spyderlib/source/browse/spyderlib/utils/iofuncs.py - -== Other Spyder plugins == - -See the example of the `pylint` third-party plugin in Spyder v2.0. \ No newline at end of file diff --git a/tests/07google_code_wiki.patch b/tests/07google_code_wiki.patch deleted file mode 100644 index b07f1db..0000000 --- a/tests/07google_code_wiki.patch +++ /dev/null @@ -1,41 +0,0 @@ ---- a/07google_code_wiki.from -+++ b/07google_code_wiki.to -@@ -28,7 +28,7 @@ - - == Introduction == - --Spyder plugins are just importable Python files that use internal APIs to do different stuff in IDE. Spyder goal is to be reusable set of components and widgets, so a lot of stuff inside is made with plugins. For example, code editor and python console are plugins. Object inspector, history browser and so on. You can see them [https://code.google.com/p/spyderlib/source/browse/#hg%2Fspyderlib%2Fplugins here]. -+Spyder plugins are importable Python modules that may use internal API to do different stuff in IDE. Spyder goal is to be reusable set of components and widgets, so a lot of things is made with plugins. For example, code editor and python console are plugins. Object inspector, history browser and so on. You can see them [https://code.google.com/p/spyderlib/source/browse/#hg%2Fspyderlib%2Fplugins here]. - - There are two type of plugins in Spyder: - * `core plugins` - these are standard Spyder components located in `spyderlib.plugins` module. They are explicitly imported like usual Python modules. -@@ -55,15 +55,15 @@ - - `[more research is needed - ask techtonik@gmail.com if you feel capable]` - --While many software use directories to distribute and find plugins, Spyder uses Python modules. It's own plugin are located under `spyderlib` namespace with all Spyder application. It can be used as a library. External plugins are expected to be found in a special importable module named. -+Some software use directories for plugin installation and discovery. Spyder uses Python modules. Internal components are contained in `spyderlib` namespace and are imported explicitly. External and 3rd party plugins are automatically imported from certain predefined locations. One such location is `spyderplugins` module. - - * _spyderplugins_: this is what this page is about - -+To make you module appear in `spyderplugins` namespace, you need to make Spyder discover and import it first. In Spyder source code tree you can just drop your module into `spyderplugins` directory. This is more like hack than convenient plugin discovery interface, so proposal to fix this are welcome. One of the ideas is to inspect Python files without importing them with `astdump` modules and check if their interface is compatible. This will also allow Spyder to enable/disable plugins from config menu. -+ - The module _spyderplugins_ includes third-party plugins of several kinds. - TODO: check if prefix for plugin kind really affects loading or if it just a convention - --In Spyder v1.1, the only kind of plugin supported were the input/output plugins (modules starting with `io_`) which provide I/O functions for the variable explorer (_Workspace_ and _Globals explorer_ in v1.1, _Variable explorer_ in v2.0). Spyder natively supports .mat, .npy, .txt, .csv, .jpg, .png and .tiff file formats. 
These plugins allow users to add their own types, like HDF5 files for example. -- - In Spyder v2.0, any kind of plugin may be created in the module _spyderplugins_. These third-party plugins may communicate with other Spyder components through the plugin interface (see [http://code.google.com/p/spyderlib/source/browse/spyderlib/plugins/__init__.py spyderlib/plugins/__init__.py]). - - == I/O Spyder plugins == -@@ -82,6 +82,10 @@ - * More examples of load/save functions may be found here: - http://code.google.com/p/spyderlib/source/browse/spyderlib/utils/iofuncs.py - -+== History === -+ -+Spyder v1.1 supported only input/output plugins (modules starting with `io_`) which provided I/O functions for the variable explorer (_Workspace_ and _Globals explorer_ in v1.1, _Variable explorer_ in v2.0). Spyder natively supports .mat, .npy, .txt, .csv, .jpg, .png and .tiff file formats out of the box. These input plugins allowed users to add their own types, like HDF5 files for example. -+ - == Other Spyder plugins == - - See the example of the `pylint` third-party plugin in Spyder v2.0. diff --git a/tests/07google_code_wiki.to b/tests/07google_code_wiki.to deleted file mode 100644 index 7307949..0000000 --- a/tests/07google_code_wiki.to +++ /dev/null @@ -1,91 +0,0 @@ -#summary Plugin development -#sidebar TOC - - -!!WARNING!! The table below displays the navigation bar -!!WARNING!! Please don't touch it unless you know what you're doing. - - - - - - - - - - - - - - - - - -
http://wiki.spyderlib.googlecode.com/hg/Buttons/logo-mini.png
[http://code.google.com/p/spyderlib http://wiki.spyderlib.googlecode.com/hg/Buttons/home.png][http://code.google.com/p/spyderlib/downloads/list http://wiki.spyderlib.googlecode.com/hg/Buttons/download.png][http://code.google.com/p/spyderlib/wiki/Features http://wiki.spyderlib.googlecode.com/hg/Buttons/features.png][http://code.google.com/p/spyderlib/wiki/Screenshots http://wiki.spyderlib.googlecode.com/hg/Buttons/screenshots.png][http://packages.python.org/spyder/ http://wiki.spyderlib.googlecode.com/hg/Buttons/docs.png][http://code.google.com/p/spyderlib/wiki/Roadmap http://wiki.spyderlib.googlecode.com/hg/Buttons/roadmap.png][http://code.google.com/p/spyderlib/wiki/Support http://wiki.spyderlib.googlecode.com/hg/Buttons/support.png][http://code.google.com/p/spyderlib/wiki/Development http://wiki.spyderlib.googlecode.com/hg/Buttons/development_sel.png][http://spyder-ide.blogspot.com http://wiki.spyderlib.googlecode.com/hg/Buttons/blog.png]
- -= Spyder plugin development = ----- - -== Introduction == - -Spyder plugins are importable Python modules that may use internal API to do different stuff in IDE. Spyder goal is to be reusable set of components and widgets, so a lot of things is made with plugins. For example, code editor and python console are plugins. Object inspector, history browser and so on. You can see them [https://code.google.com/p/spyderlib/source/browse/#hg%2Fspyderlib%2Fplugins here]. - -There are two type of plugins in Spyder: - * `core plugins` - these are standard Spyder components located in `spyderlib.plugins` module. They are explicitly imported like usual Python modules. - * `external plugins` - are discovered and imported dynamically, so they should follow naming convention so that Spyder can find them. External plugins are also initialized later all at once when some of the `core` set are already loaded `[reference need]`. - -Both plugin types should include class that inherits from the same base plugin class. - - - [ ] check if plugins can be enabled/disabled - [ ] list of all lookup dirs - [ ] how to debug plugin discovery - [ ] API reference, version information - [ ] time when core plugins are initialized - [ ] time when external plugins are initialized - -== Architecture == - -Spyder main window is de-facto storage for all Spyder state. - - - - -== Plugin discovery == - -`[more research is needed - ask techtonik@gmail.com if you feel capable]` - -Some software use directories for plugin installation and discovery. Spyder uses Python modules. Internal components are contained in `spyderlib` namespace and are imported explicitly. External and 3rd party plugins are automatically imported from certain predefined locations. One such location is `spyderplugins` module. - - * _spyderplugins_: this is what this page is about - -To make you module appear in `spyderplugins` namespace, you need to make Spyder discover and import it first. In Spyder source code tree you can just drop your module into `spyderplugins` directory. This is more like hack than convenient plugin discovery interface, so proposal to fix this are welcome. One of the ideas is to inspect Python files without importing them with `astdump` modules and check if their interface is compatible. This will also allow Spyder to enable/disable plugins from config menu. - -The module _spyderplugins_ includes third-party plugins of several kinds. -TODO: check if prefix for plugin kind really affects loading or if it just a convention - -In Spyder v2.0, any kind of plugin may be created in the module _spyderplugins_. These third-party plugins may communicate with other Spyder components through the plugin interface (see [http://code.google.com/p/spyderlib/source/browse/spyderlib/plugins/__init__.py spyderlib/plugins/__init__.py]). - -== I/O Spyder plugins == - -How to create your own I/O Spyder plugins: - * Create a Python module named `io_foobar.py` where `foobar` is the name of your plugin - * Write your loading function and/or your saving function: - * load: - * input is a string: filename - * output is a tuple with two elements: dictionary containing at least one element (key is the variable name) and error message (or `None`) - * save: - * input is a tuple with two elements: data (dictionary), filename - * output is a string or `None`: error message or `None` if no error occured - * Define the global variables `FORMAT_NAME`, `FORMAT_EXT`, `FORMAT_LOAD` and `FORMAT_SAVE`. 
See the example of DICOM images support: - http://code.google.com/p/spyderlib/source/browse/spyderplugins/io_dicom.py - * More examples of load/save functions may be found here: - http://code.google.com/p/spyderlib/source/browse/spyderlib/utils/iofuncs.py - -== History === - -Spyder v1.1 supported only input/output plugins (modules starting with `io_`) which provided I/O functions for the variable explorer (_Workspace_ and _Globals explorer_ in v1.1, _Variable explorer_ in v2.0). Spyder natively supports .mat, .npy, .txt, .csv, .jpg, .png and .tiff file formats out of the box. These input plugins allowed users to add their own types, like HDF5 files for example. - -== Other Spyder plugins == - -See the example of the `pylint` third-party plugin in Spyder v2.0. diff --git a/tests/Descript.ion b/tests/Descript.ion deleted file mode 100644 index 639c86a..0000000 --- a/tests/Descript.ion +++ /dev/null @@ -1,7 +0,0 @@ -01uni_multi.patch unified diff multiple files -02uni_newline.patch newline at the end of file -03trail_fname.patch trailing spaces in patch filenames -04can_patch.patch can be applied multiple times to the same file -data various patch data samples -06nested.patch nested dirs -05hg_change.patch mercurial diff diff --git a/tests/data/autofix/absolute-path.diff b/tests/data/autofix/absolute-path.diff deleted file mode 100644 index 8a1eabf..0000000 --- a/tests/data/autofix/absolute-path.diff +++ /dev/null @@ -1,52 +0,0 @@ -Index: c:/winnt/tests/run_tests.py -=================================================================== ---- c:/winnt/tests/run_tests.py (revision 132) -+++ c:/winnt/tests/run_tests.py (working copy) -@@ -240,6 +240,12 @@ - self.assertNotEqual(pto.parse(fp), True) - fp.close() - -+ def test_fail_absolute_path(self): -+ fp = open(join(tests_dir, "data/failing/absolute-path.diff")) -+ res = patch.PatchSet().parse(fp) -+ self.assertFalse(res) -+ fp.close() -+ - class TestPatchApply(unittest.TestCase): - def setUp(self): - self.save_cwd = os.getcwdu() -Index: c:/winnt/patch.py -=================================================================== ---- c:/winnt/patch.py (revision 132) -+++ c:/winnt/patch.py (working copy) -@@ -22,7 +22,7 @@ - from StringIO import StringIO - import urllib2 - --from os.path import exists, isfile, abspath -+from os.path import exists, isabs, isfile, abspath - from os import unlink - - -@@ -439,7 +439,21 @@ - - return (errors == 0) - -+ def process_filenames(): -+ """ sanitize filenames -+ return True on success -+ """ -+ errors = 0 - -+ for i,p in enumerate(self.items): -+ # -+ -+ # absolute paths are not allowed -+ if isabs(p.source) or isabs(p.target): -+ warning("error: absolute paths are not allowed for patch no.%d" % i) -+ -+ return (errors == 0) -+ - def apply(self): - """ apply parsed patch - return True on success diff --git a/tests/data/autofix/parent-path.diff b/tests/data/autofix/parent-path.diff deleted file mode 100644 index a60e2b9..0000000 --- a/tests/data/autofix/parent-path.diff +++ /dev/null @@ -1,13 +0,0 @@ -Index: patch.py -=================================================================== ---- ../patch.py (revision 151) -+++ ../patch.py (working copy) -@@ -13,7 +13,7 @@ - """ - - __author__ = "techtonik.rainforce.org" --__version__ = "1.11.10-dev" -+__version__ = "1.11.11-dev" - - import copy - import logging diff --git a/tests/data/autofix/stripped-trailing-whitespace.diff b/tests/data/autofix/stripped-trailing-whitespace.diff deleted file mode 100644 index f6985f7..0000000 --- 
a/tests/data/autofix/stripped-trailing-whitespace.diff +++ /dev/null @@ -1,62 +0,0 @@ -Index: Python/peephole.c -=================================================================== ---- Python/peephole.c (revision 72970) -+++ Python/peephole.c (working copy) -@@ -180,11 +180,12 @@ - } - - static int --fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts) -+fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts, -+ PyObject *names) - { - PyObject *newconst=NULL, *v; - Py_ssize_t len_consts; -- int opcode; -+ int opcode, i = 1, j; - - /* Pre-conditions */ - assert(PyList_CheckExact(consts)); -@@ -205,6 +206,11 @@ - case UNARY_INVERT: - newconst = PyNumber_Invert(v); - break; -+ case LOAD_ATTR: -+ i = 3; -+ newconst = PyObject_GetAttr(v, -+ PyTuple_GET_ITEM(names, GETARG(codestr, 3))); -+ break; - default: - /* Called with an unknown opcode */ - PyErr_Format(PyExc_SystemError, -@@ -226,9 +232,11 @@ - Py_DECREF(newconst); - - /* Write NOP LOAD_CONST newconst */ -- codestr[0] = NOP; -- codestr[1] = LOAD_CONST; -- SETARG(codestr, 1, len_consts); -+ for (j = 0; j < i; j++) { -+ codestr[j] = NOP; -+ } -+ codestr[i] = LOAD_CONST; -+ SETARG(codestr, i, len_consts); - return 1; - } - -@@ -484,10 +492,13 @@ - case UNARY_NEGATIVE: - case UNARY_CONVERT: - case UNARY_INVERT: -+ case LOAD_ATTR: - if (lastlc >= 1 && - ISBASICBLOCK(blocks, i-3, 4) && -- fold_unaryops_on_constants(&codestr[i-3], consts)) { -- i -= 2; -+ fold_unaryops_on_constants(&codestr[i-3], consts, names)) { -+ if (opcode != LOAD_ATTR) { -+ i -= 2; -+ } - assert(codestr[i] == LOAD_CONST); - cumlc = 1; - } diff --git a/tests/data/exotic/diff.py-python25.diff b/tests/data/exotic/diff.py-python25.diff deleted file mode 100644 index dc4cc6d..0000000 --- a/tests/data/exotic/diff.py-python25.diff +++ /dev/null @@ -1,22 +0,0 @@ ---- diff.py Sun Dec 27 16:08:28 2009 -+++ trunk/diff.py Sun Dec 27 15:46:58 2009 -@@ -7,7 +7,7 @@ - - """ - --import sys, os, datetime, difflib, optparse -+import sys, os, time, difflib, optparse - - def main(): - -@@ -29,8 +29,8 @@ - n = options.lines - fromfile, tofile = args - -- fromdate = datetime.datetime.fromtimestamp( os.stat(fromfile).st_mtime ).strftime(" ") -- todate = datetime.datetime.fromtimestamp( os.stat(fromfile).st_mtime ).strftime(" ") -+ fromdate = time.ctime(os.stat(fromfile).st_mtime) -+ todate = time.ctime(os.stat(tofile).st_mtime) - fromlines = open(fromfile, 'U').readlines() - tolines = open(tofile, 'U').readlines() - diff --git a/tests/data/exotic/dpatch.diff b/tests/data/exotic/dpatch.diff deleted file mode 100644 index 8f35356..0000000 --- a/tests/data/exotic/dpatch.diff +++ /dev/null @@ -1,20 +0,0 @@ -#! /bin/sh /usr/share/dpatch/dpatch-run -## 30_default_charset_utf8.dpatch by -## -## All lines beginning with `## DP:' are a description of the patch. 
-## DP: Use UTF-8 as default charset - -@DPATCH@ - -diff -uraN trac-0.11.5.orig/trac/mimeview/api.py trac-0.11.5/trac/mimeview/api.py ---- trac-0.11.5.orig/trac/mimeview/api.py 2009-06-30 21:18:58.000000000 +0200 -+++ trac-0.11.5/trac/mimeview/api.py 2009-09-28 22:02:35.000000000 +0200 -@@ -579,7 +579,7 @@ - annotators = ExtensionPoint(IHTMLPreviewAnnotator) - converters = ExtensionPoint(IContentConverter) - -- default_charset = Option('trac', 'default_charset', 'iso-8859-15', -+ default_charset = Option('trac', 'default_charset', 'utf-8', - """Charset to be used when in doubt.""") - - tab_width = IntOption('mimeviewer', 'tab_width', 8, diff --git a/tests/data/extended/git-added-new-empty-file.diff b/tests/data/extended/git-added-new-empty-file.diff deleted file mode 100644 index 3d5dc5a..0000000 --- a/tests/data/extended/git-added-new-empty-file.diff +++ /dev/null @@ -1,3 +0,0 @@ -diff --git a/new-file.txt b/new-file.txt -new file mode 100644 -index 0000000..e69de29 diff --git a/tests/data/extended/svn-added-new-empty-file.diff b/tests/data/extended/svn-added-new-empty-file.diff deleted file mode 100644 index 0305b5d..0000000 --- a/tests/data/extended/svn-added-new-empty-file.diff +++ /dev/null @@ -1,2 +0,0 @@ -Index: new-file.txt -=================================================================== diff --git a/tests/data/failing/context-format.diff b/tests/data/failing/context-format.diff deleted file mode 100644 index 49b8a06..0000000 --- a/tests/data/failing/context-format.diff +++ /dev/null @@ -1,24 +0,0 @@ -*** socketserver.py 2011-02-03 14:45:53.075396994 -0700 ---- socketserver-new.py 2011-02-03 14:57:53.185396998 -0700 -*************** -*** 224,229 **** ---- 224,232 ---- - r, w, e = select.select([self], [], [], poll_interval) - if self in r: - self._handle_request_noblock() -+ -+ if getattr(self, "active_children", None) != None and len(self.active_children) > 0: -+ self.collect_children() - finally: - self.__shutdown_request = False - self.__is_shut_down.set() -*************** -*** 521,527 **** - - def process_request(self, request, client_address): - """Fork a new subprocess to process the request.""" -- self.collect_children() - pid = os.fork() - if pid: - # Parent process ---- 524,529 ---- diff --git a/tests/data/failing/missing-hunk-line.diff b/tests/data/failing/missing-hunk-line.diff deleted file mode 100644 index 3f4ca77..0000000 --- a/tests/data/failing/missing-hunk-line.diff +++ /dev/null @@ -1,27 +0,0 @@ -Index: codereview/urls.py -=================================================================== ---- codereview/urls.py (revision 646) -+++ codereview/urls.py (working copy) -@@ -76,7 +76,8 @@ - (r'^_ah/xmpp/message/chat/', 'incoming_chat'), - (r'^_ah/mail/(.*)', 'incoming_mail'), - (r'^xsrf_token$', 'xsrf_token'), -- (r'^static/upload.py$', 'customized_upload_py'), -+ # patching upload.py on the fly -+ (r'^dynamic/upload.py$', 'customized_upload_py'), - (r'^search$', 'search'), - ) -Index: templates/use_uploadpy.html -=================================================================== ---- templates/use_uploadpy.html (revision 646) -+++ templates/use_uploadpy.html (working copy) -@@ -2,7 +2,7 @@ - - {%block body%} -

Tired of uploading files through the form?

--

Download upload.py, a simple tool for -+

Download upload.py, a simple tool for - uploading diffs from a version control system to the codereview app.

- -

Usage summary: -y \ No newline at end of file diff --git a/tests/data/failing/non-empty-patch-for-empty-file.diff b/tests/data/failing/non-empty-patch-for-empty-file.diff deleted file mode 100644 index 8efc328..0000000 --- a/tests/data/failing/non-empty-patch-for-empty-file.diff +++ /dev/null @@ -1,14 +0,0 @@ -Index: upload.py -=================================================================== ---- upload.py (revision 623) -+++ upload.py (working copy) -@@ -393,6 +393,9 @@ - ## elif e.code >= 500 and e.code < 600: - ## # Server Error - try again. - ## continue -+ elif e.code == 404: -+ ErrorExit("Error: RPC call to %s failed with status 404\n" -+ "Check upload server is valid - %s" % (request_path, self.host)) - else: - raise - finally: diff --git a/tests/data/failing/not-a-patch.log b/tests/data/failing/not-a-patch.log deleted file mode 100644 index 49ed60c..0000000 --- a/tests/data/failing/not-a-patch.log +++ /dev/null @@ -1,9 +0,0 @@ -*get* '220 ftp.**********.com MadGoat FTP server V2.5-3 ready.\r\n' -*resp* '220 ftp.**********.com MadGoat FTP server V2.5-3 ready.' -*welcome* '220 ftp.**********.com MadGoat FTP server V2.5-3 ready.' -*cmd* u'USER *******' -*put* u'USER *******\r\n' -*get* '331 Username "*******" Okay, need password.\r\n' -*resp* '331 Username "*******" Okay, need password.' -*cmd* u'PASS *******' -*put* u'PASS *******\r\n' diff --git a/tests/data/failing/upload.py b/tests/data/failing/upload.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/data/git-changed-2-files.diff b/tests/data/git-changed-2-files.diff deleted file mode 100644 index 35fa18a..0000000 --- a/tests/data/git-changed-2-files.diff +++ /dev/null @@ -1,38 +0,0 @@ -diff --git a/jsonpickle/__init__.py b/jsonpickle/__init__.py -index 22161dd..ea5ca6d 100644 ---- a/jsonpickle/__init__.py -+++ b/jsonpickle/__init__.py -@@ -87,7 +87,12 @@ class JSONPluginMgr(object): - self._decoders = {} - - ## Options to pass to specific encoders -- self._encoder_options = {} -+ json_opts = ((), {'sort_keys': True}) -+ self._encoder_options = { -+ 'json': json_opts, -+ 'simplejson': json_opts, -+ 'django.util.simplejson': json_opts, -+ } - - ## The exception class that is thrown when a decoding error occurs - self._decoder_exceptions = {} -diff --git a/tests/jsonpickle_test.py b/tests/jsonpickle_test.py -index c61dec4..09ba339 100644 ---- a/tests/jsonpickle_test.py -+++ b/tests/jsonpickle_test.py -@@ -427,6 +427,15 @@ class PicklingTestCase(unittest.TestCase): - inflated = self.unpickler.restore(flattened) - self.assertEqual(obj, inflated) - -+ def test_references(self): -+ obj_a = Thing('foo') -+ obj_b = Thing('bar') -+ coll = [obj_a, obj_b, obj_b] -+ flattened = self.pickler.flatten(coll) -+ inflated = self.unpickler.restore(flattened) -+ self.assertEqual(len(inflated), len(coll)) -+ for x in range(len(coll)): -+ self.assertEqual(repr(coll[x]), repr(inflated[x])) - - class JSONPickleTestCase(unittest.TestCase): - def setUp(self): diff --git a/tests/data/git-changed-file.diff b/tests/data/git-changed-file.diff deleted file mode 100644 index 35fa18a..0000000 --- a/tests/data/git-changed-file.diff +++ /dev/null @@ -1,38 +0,0 @@ -diff --git a/jsonpickle/__init__.py b/jsonpickle/__init__.py -index 22161dd..ea5ca6d 100644 ---- a/jsonpickle/__init__.py -+++ b/jsonpickle/__init__.py -@@ -87,7 +87,12 @@ class JSONPluginMgr(object): - self._decoders = {} - - ## Options to pass to specific encoders -- self._encoder_options = {} -+ json_opts = ((), {'sort_keys': True}) -+ self._encoder_options = { -+ 'json': 
json_opts, -+ 'simplejson': json_opts, -+ 'django.util.simplejson': json_opts, -+ } - - ## The exception class that is thrown when a decoding error occurs - self._decoder_exceptions = {} -diff --git a/tests/jsonpickle_test.py b/tests/jsonpickle_test.py -index c61dec4..09ba339 100644 ---- a/tests/jsonpickle_test.py -+++ b/tests/jsonpickle_test.py -@@ -427,6 +427,15 @@ class PicklingTestCase(unittest.TestCase): - inflated = self.unpickler.restore(flattened) - self.assertEqual(obj, inflated) - -+ def test_references(self): -+ obj_a = Thing('foo') -+ obj_b = Thing('bar') -+ coll = [obj_a, obj_b, obj_b] -+ flattened = self.pickler.flatten(coll) -+ inflated = self.unpickler.restore(flattened) -+ self.assertEqual(len(inflated), len(coll)) -+ for x in range(len(coll)): -+ self.assertEqual(repr(coll[x]), repr(inflated[x])) - - class JSONPickleTestCase(unittest.TestCase): - def setUp(self): diff --git a/tests/data/git-dash-in-filename.diff b/tests/data/git-dash-in-filename.diff deleted file mode 100644 index 3ff528b..0000000 --- a/tests/data/git-dash-in-filename.diff +++ /dev/null @@ -1,21 +0,0 @@ -From fe0fecac607022e8a8017c5209b79e4fda342213 Mon Sep 17 00:00:00 2001 -From: "Dr. Matthias St. Pierre" -Date: Thu, 10 Dec 2015 13:40:30 +0100 -Subject: [PATCH 2/2] modified hello-world.txt - ---- - hello-world.txt | 4 +++- - 1 file changed, 3 insertions(+), 1 deletion(-) - -diff --git a/hello-world.txt b/hello-world.txt -index 3b18e51..dcc4fd9 100644 ---- a/hello-world.txt -+++ b/hello-world.txt -@@ -1 +1,3 @@ --hello world -+hello world, -+how are you? -+ --- -2.4.10 - diff --git a/tests/data/hg-added-file.diff b/tests/data/hg-added-file.diff deleted file mode 100644 index 41e3ba5..0000000 --- a/tests/data/hg-added-file.diff +++ /dev/null @@ -1,59 +0,0 @@ -diff -r f51cb906c4ad -r 0260994e7b44 tests/utils.py ---- /dev/null Thu Jan 01 00:00:00 1970 +0000 -+++ b/tests/utils.py Sat Aug 06 07:26:28 2011 +0200 -@@ -0,0 +1,55 @@ -+# Copyright 2011 Google Inc. -+# -+# Licensed under the Apache License, Version 2.0 (the "License"); -+# you may not use this file except in compliance with the License. -+# You may obtain a copy of the License at -+# -+# http://www.apache.org/licenses/LICENSE-2.0 -+# -+# Unless required by applicable law or agreed to in writing, software -+# distributed under the License is distributed on an "AS IS" BASIS, -+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+# See the License for the specific language governing permissions and -+# limitations under the License. -+ -+"""Test utils.""" -+ -+import os -+ -+from google.appengine.ext import testbed -+ -+from django.test import TestCase as _TestCase -+ -+ -+class TestCase(_TestCase): -+ """Customized Django TestCase. -+ -+ This class disables the setup of Django features that are not -+ available on App Engine (e.g. fixture loading). And it initializes -+ the Testbad class provided by the App Engine SDK. 
-+ """ -+ -+ def _fixture_setup(self): # defined in django.test.TestCase -+ pass -+ -+ def _fixture_teardown(self): # defined in django.test.TestCase -+ pass -+ -+ def setUp(self): -+ super(TestCase, self).setUp() -+ self.testbed = testbed.Testbed() -+ self.testbed.activate() -+ self.testbed.init_datastore_v3_stub() -+ self.testbed.init_user_stub() -+ -+ def tearDown(self): -+ self.testbed.deactivate() -+ super(TestCase, self).tearDown() -+ -+ def login(self, email): -+ """Logs in a user identified by email.""" -+ os.environ['USER_EMAIL'] = email -+ -+ def logout(self): -+ """Logs the user out.""" -+ os.environ['USER_EMAIL'] = '' diff --git a/tests/data/hg-changed-2-files.diff b/tests/data/hg-changed-2-files.diff deleted file mode 100644 index 46462b6..0000000 --- a/tests/data/hg-changed-2-files.diff +++ /dev/null @@ -1,22 +0,0 @@ -diff -r b2d9961ff1f5 TODO ---- a/TODO Sat Dec 26 16:36:37 2009 +0200 -+++ b/TODO Sun Dec 27 22:28:17 2009 +0200 -@@ -7,3 +7,7 @@ - - remove files - - svn diff - - hg diff -+ -+Source and target file conflicts: -+- two same source files in the same patch -+- one source and later one target file with the same name (that exists) -diff -r b2d9961ff1f5 test commit/review system test ---- a/test commit/review system test Sat Dec 26 16:36:37 2009 +0200 -+++ b/test commit/review system test Sun Dec 27 22:28:17 2009 +0200 -@@ -1,4 +1,4 @@ - something to - change in --this file --for review -\ No newline at end of file -+this file <-- this should be removed!!! ARRGH! BASTARD, HOW DARE YOU TO MESS WITH PROJECT HISTORY! -+for review diff --git a/tests/data/hg-exported.diff b/tests/data/hg-exported.diff deleted file mode 100644 index a0de958..0000000 --- a/tests/data/hg-exported.diff +++ /dev/null @@ -1,59 +0,0 @@ -# HG changeset patch -# User ralf@brainbot.com -# Date 1215097201 -7200 -# Branch xmlrpc_repr -# Node ID 2f52c1a4e3ff26d4d7b391e7851792d4e47d8017 -# Parent 185779ba2591ba6249601209c8dd5750b6e14716 -implement _Method.__repr__/__str__ - -diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py ---- a/Lib/test/test_xmlrpc.py -+++ b/Lib/test/test_xmlrpc.py -@@ -639,9 +639,31 @@ - os.remove("xmldata.txt") - os.remove(test_support.TESTFN) - -+class ReprTest(unittest.TestCase): -+ """ -+ calling repr/str on a _Method object should not consult the xmlrpc server -+ (http://bugs.python.org/issue1690840) -+ """ -+ def _make_method(self): -+ """return a _Method object, which when called raises a RuntimeError""" -+ def _raise_error(*args): -+ raise RuntimeError("called") -+ return xmlrpclib._Method(_raise_error, 'test') -+ -+ def test_method_repr(self): -+ m = self._make_method() -+ repr(m) -+ repr(xmlrpclib.ServerProxy('http://localhost:8000').doit) -+ -+ -+ def test_method_str(self): -+ m = self._make_method() -+ str(m) -+ str(xmlrpclib.ServerProxy('http://localhost:8000').doit) -+ - def test_main(): - xmlrpc_tests = [XMLRPCTestCase, HelperTestCase, DateTimeTestCase, -- BinaryTestCase, FaultTestCase] -+ BinaryTestCase, FaultTestCase, ReprTest] - - # The test cases against a SimpleXMLRPCServer raise a socket error - # 10035 (WSAEWOULDBLOCK) in the server thread handle_request call when -diff --git a/Lib/xmlrpclib.py b/Lib/xmlrpclib.py ---- a/Lib/xmlrpclib.py -+++ b/Lib/xmlrpclib.py -@@ -1191,7 +1191,10 @@ - return _Method(self.__send, "%s.%s" % (self.__name, name)) - def __call__(self, *args): - return self.__send(self.__name, args) -- -+ def __repr__(self): -+ return "<%s.%s %s %s>" % (self.__class__.__module__, self.__class__.__name__, self.__name, self.__send) -+ 
__str__ = __repr__ -+ - ## - # Standard transport class for XML-RPC over HTTP. - #

diff --git a/tests/data/svn-added-new-file-withcontent.diff b/tests/data/svn-added-new-file-withcontent.diff deleted file mode 100644 index a1ac10f..0000000 --- a/tests/data/svn-added-new-file-withcontent.diff +++ /dev/null @@ -1,7 +0,0 @@ -Index: new-file.txt -=================================================================== ---- new-file.txt (revision 0) -+++ new-file.txt (revision 0) -@@ -0,0 +1 @@ -+with content -\ No newline at end of file diff --git a/tests/data/svn-changed-2-files.diff b/tests/data/svn-changed-2-files.diff deleted file mode 100644 index 4f447e5..0000000 --- a/tests/data/svn-changed-2-files.diff +++ /dev/null @@ -1,86 +0,0 @@ -Index: trac/versioncontrol/svn_fs.py -=================================================================== ---- trac/versioncontrol/svn_fs.py (revision 8986) -+++ trac/versioncontrol/svn_fs.py (working copy) -@@ -289,7 +289,7 @@ - repos = fs_repos - else: - repos = CachedRepository(self.env.get_db_cnx, fs_repos, None, -- self.log) -+ self.log, self.env) - repos.has_linear_changesets = True - if authname: - authz = SubversionAuthorizer(self.env, weakref.proxy(repos), -Index: trac/versioncontrol/cache.py -=================================================================== ---- trac/versioncontrol/cache.py (revision 8986) -+++ trac/versioncontrol/cache.py (working copy) -@@ -18,7 +18,7 @@ - import os - import posixpath - --from trac.core import TracError -+from trac.core import * - from trac.util.datefmt import utc, to_timestamp - from trac.util.translation import _ - from trac.versioncontrol import Changeset, Node, Repository, Authorizer, \ -@@ -36,19 +36,42 @@ - CACHE_METADATA_KEYS = (CACHE_REPOSITORY_DIR, CACHE_YOUNGEST_REV) - - -+class ICacheChangesetListener(Interface): -+ """Cached changeset operations""" -+ -+ def edit_changeset(cset): -+ """Called when changeset is about to be cached. -+ Returns altered data to cache or None if unchanged. 
cset usually -+ contains cset.date, cset.author, cset.message and cset.rev -+ """ -+ -+class CacheManager(Component): -+ """Provide interface to plug-in into cache operations""" -+ -+ observers = ExtensionPoint(ICacheChangesetListener) -+ -+ def check_changeset(self, cset): -+ for observer in self.observers: -+ res = observer.edit_changeset(cset) -+ if res != None: -+ cset = res -+ return cset -+ -+ - class CachedRepository(Repository): - - has_linear_changesets = False - - scope = property(lambda self: self.repos.scope) - -- def __init__(self, getdb, repos, authz, log): -+ def __init__(self, getdb, repos, authz, log, env): - Repository.__init__(self, repos.name, authz, log) - if callable(getdb): - self.getdb = getdb - else: - self.getdb = lambda: getdb - self.repos = repos -+ self.cache_mgr = CacheManager(env) - - def close(self): - self.repos.close() -@@ -77,6 +100,7 @@ - - def sync_changeset(self, rev): - cset = self.repos.get_changeset(rev) -+ cset = self.cache_mgr.check_changeset(cset) - db = self.getdb() - cursor = db.cursor() - cursor.execute("UPDATE revision SET time=%s, author=%s, message=%s " -@@ -182,6 +206,7 @@ - self.log.info("Trying to sync revision [%s]" % - next_youngest) - cset = self.repos.get_changeset(next_youngest) -+ cset = self.cache_mgr.check_changeset(cset) - try: - cursor.execute("INSERT INTO revision " - " (rev,time,author,message) " diff --git a/tests/data/svn-modified-empty-file.diff b/tests/data/svn-modified-empty-file.diff deleted file mode 100644 index 704a565..0000000 --- a/tests/data/svn-modified-empty-file.diff +++ /dev/null @@ -1,7 +0,0 @@ -Index: upload.py -=================================================================== ---- upload.py (revision 120) -+++ upload.py (working copy) -@@ -0,0 +1 @@ -+new info -\ No newline at end of file diff --git a/tests/recoverage.bat b/tests/recoverage.bat deleted file mode 100644 index 2792a93..0000000 --- a/tests/recoverage.bat +++ /dev/null @@ -1,4 +0,0 @@ -cd .. -python -m coverage run tests/run_tests.py -python -m coverage html -d tests/coverage -python -m coverage report -m diff --git a/tests/run_tests.py b/tests/run_tests.py deleted file mode 100755 index 876aeae..0000000 --- a/tests/run_tests.py +++ /dev/null @@ -1,448 +0,0 @@ -#!/usr/bin/env python -""" -python-patch test suite - -There are two kind of tests: - - file-based tests - - directory-based tests - - unit tests - -File-based test is patch file, initial file and resulting file -for comparison. - -Directory-based test is a self-sufficient directory with: -files to be patched, patch file itself and [result] dir. You can -manually apply patch and compare outcome with [result] directory. -This is what this test runner does. - -Unit tests test API and are all inside this runner. 
- - -== Code Coverage == - -To refresh code coverage stats, get 'coverage' tool from -http://pypi.python.org/pypi/coverage/ and run this file with: - - coverage run run_tests.py - coverage html -d coverage - -On Windows it may be more convenient instead of `coverage` call -`python -m coverage.__main__` -""" -from __future__ import print_function - -import os -import sys -import re -import shutil -import unittest -import copy -from os import listdir -from os.path import abspath, dirname, exists, join, isdir, isfile -from tempfile import mkdtemp -try: - getcwdu = os.getcwdu -except AttributeError: - getcwdu = os.getcwd # python 3, where getcwd always returns a unicode object - -verbose = False -if "-v" in sys.argv or "--verbose" in sys.argv: - verbose = True - - -# full path for directory with tests -TESTS = dirname(abspath(__file__)) -TESTDATA = join(TESTS, 'data') -def testfile(name): - return join(TESTDATA, name) - - -# import patch.py from parent directory -save_path = sys.path -sys.path.insert(0, dirname(TESTS)) -import patch -sys.path = save_path - - -# ---------------------------------------------------------------------------- -class TestPatchFiles(unittest.TestCase): - """ - unittest hack - test* methods are generated by add_test_methods() function - below dynamically using information about *.patch files from tests directory - - """ - def _assert_files_equal(self, file1, file2): - f1 = f2 = None - try: - f1 = open(file1, "rb") - f2 = open(file2, "rb") - for line in f1: - self.assertEqual(line, f2.readline()) - - finally: - if f2: - f2.close() - if f1: - f1.close() - - def _assert_dirs_equal(self, dir1, dir2, ignore=[]): - """ - compare dir2 with reference dir1, ignoring entries - from supplied list - - """ - # recursive - if type(ignore) == str: - ignore = [ignore] - e2list = [en for en in listdir(dir2) if en not in ignore] - for e1 in listdir(dir1): - if e1 in ignore: - continue - e1path = join(dir1, e1) - e2path = join(dir2, e1) - self.assertTrue(exists(e1path)) - self.assertTrue(exists(e2path), "%s does not exist" % e2path) - self.assertTrue(isdir(e1path) == isdir(e2path)) - if not isdir(e1path): - self._assert_files_equal(e1path, e2path) - else: - self._assert_dirs_equal(e1path, e2path, ignore=ignore) - e2list.remove(e1) - for e2 in e2list: - self.fail("extra file or directory: %s" % e2) - - - def _run_test(self, testname): - """ - boilerplate for running *.patch file tests - """ - - # 1. create temp test directory - # 2. copy files - # 3. execute file-based patch - # 4. compare results - # 5. cleanup on success - - tmpdir = mkdtemp(prefix="%s."%testname) - - basepath = join(TESTS, testname) - basetmp = join(tmpdir, testname) - - patch_file = basetmp + ".patch" - - file_based = isfile(basepath + ".from") - from_tgt = basetmp + ".from" - - if file_based: - shutil.copy(basepath + ".from", tmpdir) - shutil.copy(basepath + ".patch", tmpdir) - else: - # directory-based - for e in listdir(basepath): - epath = join(basepath, e) - if not isdir(epath): - shutil.copy(epath, join(tmpdir, e)) - else: - shutil.copytree(epath, join(tmpdir, e)) - - - # 3. - # test utility as a whole - patch_tool = join(dirname(TESTS), "patch.py") - save_cwd = getcwdu() - os.chdir(tmpdir) - if verbose: - cmd = '%s %s "%s"' % (sys.executable, patch_tool, patch_file) - print("\n"+cmd) - else: - cmd = '%s %s -q "%s"' % (sys.executable, patch_tool, patch_file) - ret = os.system(cmd) - assert ret == 0, "Error %d running test %s" % (ret, testname) - os.chdir(save_cwd) - - - # 4. 
- # compare results - if file_based: - self._assert_files_equal(basepath + ".to", from_tgt) - else: - # recursive comparison - self._assert_dirs_equal(join(basepath, "[result]"), - tmpdir, - ignore=["%s.patch" % testname, ".svn", "[result]"]) - - - shutil.rmtree(tmpdir) - return 0 - - -def add_test_methods(cls): - """ - hack to generate test* methods in target class - one - for each *.patch file in tests directory - """ - - # list testcases - every test starts with number - # and add them as test* methods - testptn = re.compile(r"^(?P\d{2,}[^\.]+).*$") - - testset = [testptn.match(e).group('name') for e in listdir(TESTS) if testptn.match(e)] - testset = sorted(set(testset)) - - for filename in testset: - methname = 'test_' + filename - def create_closure(): - name = filename - return lambda self: self._run_test(name) - test = create_closure() - setattr(cls, methname, test) - if verbose: - print("added test method %s to %s" % (methname, cls)) -add_test_methods(TestPatchFiles) - -# ---------------------------------------------------------------------------- - -class TestCheckPatched(unittest.TestCase): - def setUp(self): - self.save_cwd = getcwdu() - os.chdir(TESTS) - - def tearDown(self): - os.chdir(self.save_cwd) - - def test_patched_multipatch(self): - pto = patch.fromfile("01uni_multi/01uni_multi.patch") - os.chdir(join(TESTS, "01uni_multi", "[result]")) - self.assertTrue(pto.can_patch(b"updatedlg.cpp")) - - def test_can_patch_single_source(self): - pto2 = patch.fromfile("02uni_newline.patch") - self.assertTrue(pto2.can_patch(b"02uni_newline.from")) - - def test_can_patch_fails_on_target_file(self): - pto3 = patch.fromfile("03trail_fname.patch") - self.assertEqual(None, pto3.can_patch(b"03trail_fname.to")) - self.assertEqual(None, pto3.can_patch(b"not_in_source.also")) - - def test_multiline_false_on_other_file(self): - pto = patch.fromfile("01uni_multi/01uni_multi.patch") - os.chdir(join(TESTS, "01uni_multi")) - self.assertFalse(pto.can_patch(b"updatedlg.cpp")) - - def test_single_false_on_other_file(self): - pto3 = patch.fromfile("03trail_fname.patch") - self.assertFalse(pto3.can_patch("03trail_fname.from")) - - def test_can_patch_checks_source_filename_even_if_target_can_be_patched(self): - pto2 = patch.fromfile("04can_patch.patch") - self.assertFalse(pto2.can_patch("04can_patch.to")) - -# ---------------------------------------------------------------------------- - -class TestPatchParse(unittest.TestCase): - def test_fromstring(self): - try: - f = open(join(TESTS, "01uni_multi/01uni_multi.patch"), "rb") - readstr = f.read() - finally: - f.close() - pst = patch.fromstring(readstr) - self.assertEqual(len(pst), 5) - - def test_fromfile(self): - pst = patch.fromfile(join(TESTS, "01uni_multi/01uni_multi.patch")) - self.assertNotEqual(pst, False) - self.assertEqual(len(pst), 5) - ps2 = patch.fromfile(testfile("failing/not-a-patch.log")) - self.assertFalse(ps2) - - def test_no_header_for_plain_diff_with_single_file(self): - pto = patch.fromfile(join(TESTS, "03trail_fname.patch")) - self.assertEqual(pto.items[0].header, []) - - def test_header_for_second_file_in_svn_diff(self): - pto = patch.fromfile(join(TESTS, "01uni_multi/01uni_multi.patch")) - self.assertEqual(pto.items[1].header[0], b'Index: updatedlg.h\r\n') - self.assertTrue(pto.items[1].header[1].startswith(b'=====')) - - def test_hunk_desc(self): - pto = patch.fromfile(testfile('git-changed-file.diff')) - self.assertEqual(pto.items[0].hunks[0].desc, b'class JSONPluginMgr(object):') - - def test_autofixed_absolute_path(self): - pto 
= patch.fromfile(join(TESTS, "data/autofix/absolute-path.diff")) - self.assertEqual(pto.errors, 0) - self.assertEqual(pto.warnings, 2) - self.assertEqual(pto.items[0].source, b"winnt/tests/run_tests.py") - - def test_autofixed_parent_path(self): - # [ ] exception vs return codes for error recovery - # [x] separate return code when patch lib compensated the error - # (implemented as warning count) - pto = patch.fromfile(join(TESTS, "data/autofix/parent-path.diff")) - self.assertEqual(pto.errors, 0) - self.assertEqual(pto.warnings, 2) - self.assertEqual(pto.items[0].source, b"patch.py") - - def test_autofixed_stripped_trailing_whitespace(self): - pto = patch.fromfile(join(TESTS, "data/autofix/stripped-trailing-whitespace.diff")) - self.assertEqual(pto.errors, 0) - self.assertEqual(pto.warnings, 4) - - def test_fail_missing_hunk_line(self): - fp = open(join(TESTS, "data/failing/missing-hunk-line.diff"), 'rb') - pto = patch.PatchSet() - self.assertNotEqual(pto.parse(fp), True) - fp.close() - - def test_fail_context_format(self): - fp = open(join(TESTS, "data/failing/context-format.diff"), 'rb') - res = patch.PatchSet().parse(fp) - self.assertFalse(res) - fp.close() - - def test_fail_not_a_patch(self): - fp = open(join(TESTS, "data/failing/not-a-patch.log"), 'rb') - res = patch.PatchSet().parse(fp) - self.assertFalse(res) - fp.close() - - def test_diffstat(self): - output = """\ - updatedlg.cpp | 20 ++++++++++++++++++-- - updatedlg.h | 1 + - manifest.xml | 15 ++++++++------- - conf.cpp | 23 +++++++++++++++++------ - conf.h | 7 ++++--- - 5 files changed, 48 insertions(+), 18 deletions(-), +1203 bytes""" - pto = patch.fromfile(join(TESTS, "01uni_multi/01uni_multi.patch")) - self.assertEqual(pto.diffstat(), output, "Output doesn't match") - - -class TestPatchSetDetection(unittest.TestCase): - def test_svn_detected(self): - pto = patch.fromfile(join(TESTS, "01uni_multi/01uni_multi.patch")) - self.assertEqual(pto.type, patch.SVN) - -# generate tests methods for TestPatchSetDetection - one for each patch file -def generate_detection_test(filename, patchtype): - # saving variable in local scope to prevent test() - # from fetching it from global - patchtype = difftype - def test(self): - pto = patch.fromfile(join(TESTDATA, filename)) - self.assertEqual(pto.type, patchtype) - return test - -for filename in os.listdir(TESTDATA): - if isdir(join(TESTDATA, filename)): - continue - - difftype = patch.PLAIN - if filename.startswith('git-'): - difftype = patch.GIT - if filename.startswith('hg-'): - difftype = patch.HG - if filename.startswith('svn-'): - difftype = patch.SVN - - name = 'test_'+filename - test = generate_detection_test(filename, difftype) - setattr(TestPatchSetDetection, name, test) - if verbose: - print("added test method %s to %s" % (name, 'TestPatchSetDetection')) - - -class TestPatchApply(unittest.TestCase): - def setUp(self): - self.save_cwd = getcwdu() - self.tmpdir = mkdtemp(prefix=self.__class__.__name__) - os.chdir(self.tmpdir) - - def tearDown(self): - os.chdir(self.save_cwd) - shutil.rmtree(self.tmpdir) - - def tmpcopy(self, filenames): - """copy file(s) from test_dir to self.tmpdir""" - for f in filenames: - shutil.copy(join(TESTS, f), self.tmpdir) - - def test_apply_returns_false_on_failure(self): - self.tmpcopy(['data/failing/non-empty-patch-for-empty-file.diff', - 'data/failing/upload.py']) - pto = patch.fromfile('non-empty-patch-for-empty-file.diff') - self.assertFalse(pto.apply()) - - def test_apply_returns_true_on_success(self): - self.tmpcopy(['03trail_fname.patch', - 
'03trail_fname.from']) - pto = patch.fromfile('03trail_fname.patch') - self.assertTrue(pto.apply()) - - def test_revert(self): - def get_file_content(filename): - with open(filename, 'rb') as f: - return f.read() - - self.tmpcopy(['03trail_fname.patch', - '03trail_fname.from']) - pto = patch.fromfile('03trail_fname.patch') - self.assertTrue(pto.apply()) - self.assertNotEqual(get_file_content(self.tmpdir + '/03trail_fname.from'), - get_file_content(TESTS + '/03trail_fname.from')) - self.assertTrue(pto.revert()) - self.assertEqual(get_file_content(self.tmpdir + '/03trail_fname.from'), - get_file_content(TESTS + '/03trail_fname.from')) - - def test_apply_root(self): - treeroot = join(self.tmpdir, 'rootparent') - shutil.copytree(join(TESTS, '06nested'), treeroot) - pto = patch.fromfile(join(TESTS, '06nested/06nested.patch')) - self.assertTrue(pto.apply(root=treeroot)) - - def test_apply_strip(self): - treeroot = join(self.tmpdir, 'rootparent') - shutil.copytree(join(TESTS, '06nested'), treeroot) - pto = patch.fromfile(join(TESTS, '06nested/06nested.patch')) - for p in pto: - p.source = b'nasty/prefix/' + p.source - p.target = b'nasty/prefix/' + p.target - self.assertTrue(pto.apply(strip=2, root=treeroot)) - - -class TestHelpers(unittest.TestCase): - # unittest setting - longMessage = True - - absolute = [b'/', b'c:\\', b'c:/', b'\\', b'/path', b'c:\\path'] - relative = [b'path', b'path:\\', b'path:/', b'path\\', b'path/', b'path\\path'] - - def test_xisabs(self): - for path in self.absolute: - self.assertTrue(patch.xisabs(path), 'Target path: ' + repr(path)) - for path in self.relative: - self.assertFalse(patch.xisabs(path), 'Target path: ' + repr(path)) - - def test_xnormpath(self): - path = b"../something/..\\..\\file.to.patch" - self.assertEqual(patch.xnormpath(path), b'../../file.to.patch') - - def test_xstrip(self): - for path in self.absolute[:4]: - self.assertEqual(patch.xstrip(path), b'') - for path in self.absolute[4:6]: - self.assertEqual(patch.xstrip(path), b'path') - # test relative paths are not affected - for path in self.relative: - self.assertEqual(patch.xstrip(path), path) - - def test_pathstrip(self): - self.assertEqual(patch.pathstrip(b'path/to/test/name.diff', 2), b'test/name.diff') - self.assertEqual(patch.pathstrip(b'path/name.diff', 1), b'name.diff') - self.assertEqual(patch.pathstrip(b'path/name.diff', 0), b'path/name.diff') - -# ---------------------------------------------------------------------------- - -if __name__ == '__main__': - unittest.main()