Get tests running cleanly with -3 · awesome-python/html5lib-python@ed014f6 · GitHub
[go: up one dir, main page]

Skip to content

Commit ed014f6

Browse files
committed
Get tests running cleanly with -3
1 parent df16775 commit ed014f6

File tree

3 files changed

+11
-5
lines changed

3 files changed

+11
-5
lines changed

html5lib/sanitizer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -166,12 +166,12 @@ def sanitize_token(self, token):
166166
if token_type in (tokenTypes["StartTag"], tokenTypes["EndTag"],
167167
tokenTypes["EmptyTag"]):
168168
if token["name"] in self.allowed_elements:
169-
if token.has_key("data"):
169+
if "data" in token:
170170
attrs = dict([(name,val) for name,val in
171171
token["data"][::-1]
172172
if name in self.allowed_attributes])
173173
for attr in self.attr_val_is_uri:
174-
if not attrs.has_key(attr):
174+
if attr not in attrs:
175175
continue
176176
val_unescaped = re.sub("[`\000-\040\177-\240\s]+", '',
177177
unescape(attrs[attr])).lower()
@@ -190,7 +190,7 @@ def sanitize_token(self, token):
190190
'xlink:href' in attrs and re.search('^\s*[^#\s].*',
191191
attrs['xlink:href'])):
192192
del attrs['xlink:href']
193-
if attrs.has_key('style'):
193+
if 'style' in attrs:
194194
attrs['style'] = self.sanitize_css(attrs['style'])
195195
token["data"] = [[name,val] for name,val in attrs.items()]
196196
return token

html5lib/serializer/htmlserializer.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,12 @@
44
# Import from the sets module for python 2.3
55
from sets import ImmutableSet as frozenset
66

7+
try:
8+
# use functools.reduce to avoid DeprecationWarning with -3
9+
from functools import reduce
10+
except ImportError:
11+
pass
12+
713
import gettext
814
_ = gettext.gettext
915

@@ -150,7 +156,7 @@ def __init__(self, **kwargs):
150156
151157
.. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
152158
"""
153-
if kwargs.has_key('quote_char'):
159+
if 'quote_char' in kwargs:
154160
self.use_best_quote_char = False
155161
for attr in self.options:
156162
setattr(self, attr, kwargs.get(attr, getattr(self, attr)))

html5lib/tests/test_tokenizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -124,7 +124,7 @@ def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
124124

125125
def unescape_test(test):
126126
def decode(inp):
127-
return inp.decode("unicode-escape")
127+
return inp.encode("utf-8").decode("unicode-escape")
128128

129129
test["input"] = decode(test["input"])
130130
for token in test["output"]:

0 commit comments

Comments (0)