Move tokenizer tests to nose · html5lib/html5lib-python@918027c · GitHub
[go: up one dir, main page]

Skip to content

Commit 918027c

Browse files
author
James Graham
committed
Move tokenizer tests to nose
--HG-- extra : rebase_source : 690f67ebe16128990d5a788d1c53833c1f0af2f4
1 parent bfa3cd3 commit 918027c

File tree

1 file changed

+3
-8
lines changed

1 file changed

+3
-8
lines changed

html5lib/tests/test_tokenizer.py

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import sys
22
import os
3-
import unittest
43
import cStringIO
54
import warnings
65
import re
@@ -122,7 +121,7 @@ def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
122121
tokens[tokenType][1].append(token)
123122
return tokens["expected"] == tokens["received"]
124123

125-
def unescape_test(test):
124+
def unescape(test):
126125
def decode(inp):
127126
return inp.decode("unicode-escape")
128127

@@ -138,7 +137,6 @@ def decode(inp):
138137
token[2][decode(key)] = decode(value)
139138
return test
140139

141-
142140
def runTokenizerTest(test):
143141
#XXX - move this out into the setup function
144142
#concatenate all consecutive character tokens into a single token
@@ -163,8 +161,7 @@ def runTokenizerTest(test):
163161
"\nreceived:", unicode(tokens)])
164162
errorMsg = errorMsg.encode("utf-8")
165163
ignoreErrorOrder = test.get('ignoreErrorOrder', False)
166-
assert tokensMatch(expected, received, ignoreErrorOrder), errorMsg
167-
164+
assert tokensMatch(expected, received, ignoreErrorOrder, True), errorMsg
168165

169166
def _doCapitalize(match):
170167
return match.group(1).upper()
@@ -176,8 +173,7 @@ def capitalize(s):
176173
s = _capitalizeRe(_doCapitalize, s)
177174
return s
178175

179-
180-
def test_tokenizer():
176+
def testTokenizer():
181177
for filename in html5lib_test_files('tokenizer', '*.test'):
182178
tests = json.load(file(filename))
183179
testName = os.path.basename(filename).replace(".test","")
@@ -190,4 +186,3 @@ def test_tokenizer():
190186
for initialState in test["initialStates"]:
191187
test["initialState"] = capitalize(initialState)
192188
yield runTokenizerTest, test
193-

0 commit comments

Comments (0)