@@ -4,6 +4,7 @@
 import stat
 import sys
 import re
+from test import support
 from test.support import os_helper
 from test.support import warnings_helper
 from test.support.testcase import ExtraAssertions
@@ -106,8 +107,7 @@ def test_http2time_formats(self):
             self.assertEqual(http2time(s.lower()), test_t, s.lower())
             self.assertEqual(http2time(s.upper()), test_t, s.upper())
 
-    def test_http2time_garbage(self):
-        for test in [
+    @support.subTests('test', [
             '',
             'Garbage',
             'Mandag 16. September 1996',
@@ -122,10 +122,9 @@ def test_http2time_garbage(self):
             '08-01-3697739',
             '09 Feb 19942632 22:23:32 GMT',
             'Wed, 09 Feb 1994834 22:23:32 GMT',
-            ]:
-            self.assertIsNone(http2time(test),
-                              "http2time(%s) is not None\n"
-                              "http2time(test) %s" % (test, http2time(test)))
+            ])
+    def test_http2time_garbage(self, test):
+        self.assertIsNone(http2time(test))
 
     def test_http2time_redos_regression_actually_completes(self):
         # LOOSE_HTTP_DATE_RE was vulnerable to malicious input which caused catastrophic backtracking (REDoS).
@@ -150,9 +149,7 @@ def parse_date(text):
         self.assertEqual(parse_date("1994-02-03 19:45:29 +0530"),
                          (1994, 2, 3, 14, 15, 29))
 
-    def test_iso2time_formats(self):
-        # test iso2time for supported dates.
-        tests = [
+    @support.subTests('s', [
             '1994-02-03 00:00:00 -0000', # ISO 8601 format
             '1994-02-03 00:00:00 +0000', # ISO 8601 format
             '1994-02-03 00:00:00', # zone is optional
@@ -165,16 +162,15 @@ def test_iso2time_formats(self):
             # A few tests with extra space at various places
             ' 1994-02-03 ',
             ' 1994-02-03T00:00:00 ',
-            ]
-
+            ])
+    def test_iso2time_formats(self, s):
+        # test iso2time for supported dates.
         test_t = 760233600  # assume broken POSIX counting of seconds
-        for s in tests:
-            self.assertEqual(iso2time(s), test_t, s)
-            self.assertEqual(iso2time(s.lower()), test_t, s.lower())
-            self.assertEqual(iso2time(s.upper()), test_t, s.upper())
+        self.assertEqual(iso2time(s), test_t, s)
+        self.assertEqual(iso2time(s.lower()), test_t, s.lower())
+        self.assertEqual(iso2time(s.upper()), test_t, s.upper())
 
-    def test_iso2time_garbage(self):
-        for test in [
+    @support.subTests('test', [
             '',
             'Garbage',
             'Thursday, 03-Feb-94 00:00:00 GMT',
@@ -187,9 +183,9 @@ def test_iso2time_garbage(self):
             '01-01-1980 00:00:62',
             '01-01-1980T00:00:62',
             '19800101T250000Z',
-            ]:
-            self.assertIsNone(iso2time(test),
-                              "iso2time(%r)" % test)
+            ])
+    def test_iso2time_garbage(self, test):
+        self.assertIsNone(iso2time(test))
 
     def test_iso2time_performance_regression(self):
         # If ISO_DATE_RE regresses to quadratic complexity, this test will take a very long time to succeed.
@@ -200,24 +196,23 @@ def test_iso2time_performance_regression(self):
 
 class HeaderTests(unittest.TestCase):
 
-    def test_parse_ns_headers(self):
-        # quotes should be stripped
-        expected = [[('foo', 'bar'), ('expires', 2209069412), ('version', '0')]]
-        for hdr in [
+    @support.subTests('hdr', [
             'foo=bar; expires=01 Jan 2040 22:23:32 GMT',
             'foo=bar; expires="01 Jan 2040 22:23:32 GMT"',
-            ]:
-            self.assertEqual(parse_ns_headers([hdr]), expected)
-
-    def test_parse_ns_headers_version(self):
-
+            ])
+    def test_parse_ns_headers(self, hdr):
         # quotes should be stripped
-        expected = [[('foo', 'bar'), ('version', '1')]]
-        for hdr in [
+        expected = [[('foo', 'bar'), ('expires', 2209069412), ('version', '0')]]
+        self.assertEqual(parse_ns_headers([hdr]), expected)
+
+    @support.subTests('hdr', [
             'foo=bar; version="1"',
             'foo=bar; Version="1"',
-            ]:
-            self.assertEqual(parse_ns_headers([hdr]), expected)
+            ])
+    def test_parse_ns_headers_version(self, hdr):
+        # quotes should be stripped
+        expected = [[('foo', 'bar'), ('version', '1')]]
+        self.assertEqual(parse_ns_headers([hdr]), expected)
 
     def test_parse_ns_headers_special_names(self):
         # names such as 'expires' are not special in first name=value pair
@@ -233,8 +228,7 @@ def test_join_header_words(self):
 
         self.assertEqual(join_header_words([[]]), "")
 
-    def test_split_header_words(self):
-        tests = [
+    @support.subTests('arg,expect', [
             ("foo", [[("foo", None)]]),
             ("foo=bar", [[("foo", "bar")]]),
             (" foo ", [[("foo", None)]]),
@@ -251,24 +245,22 @@ def test_split_header_words(self):
             (r'foo; bar=baz, spam=, foo="\,\;\"", bar= ',
              [[("foo", None), ("bar", "baz")],
               [("spam", "")], [("foo", ',;"')], [("bar", "")]]),
-            ]
-
-        for arg, expect in tests:
-            try:
-                result = split_header_words([arg])
-            except:
-                import traceback, io
-                f = io.StringIO()
-                traceback.print_exc(None, f)
-                result = "(error -- traceback follows)\n\n%s" % f.getvalue()
-            self.assertEqual(result, expect, """
+            ])
+    def test_split_header_words(self, arg, expect):
+        try:
+            result = split_header_words([arg])
+        except:
+            import traceback, io
+            f = io.StringIO()
+            traceback.print_exc(None, f)
+            result = "(error -- traceback follows)\n\n%s" % f.getvalue()
+        self.assertEqual(result, expect, """
 When parsing: '%s'
 Expected: '%s'
 Got: '%s'
 """ % (arg, expect, result))
 
-    def test_roundtrip(self):
-        tests = [
+    @support.subTests('arg,expect', [
             ("foo", "foo"),
             ("foo=bar", "foo=bar"),
             (" foo ", "foo"),
@@ -301,12 +293,11 @@ def test_roundtrip(self):
 
             ('n; foo="foo;_", bar="foo,_"',
              'n; foo="foo;_", bar="foo,_"'),
-            ]
-
-        for arg, expect in tests:
-            input = split_header_words([arg])
-            res = join_header_words(input)
-            self.assertEqual(res, expect, """
+            ])
+    def test_roundtrip(self, arg, expect):
+        input = split_header_words([arg])
+        res = join_header_words(input)
+        self.assertEqual(res, expect, """
 When parsing: '%s'
 Expected: '%s'
 Got: '%s'
@@ -508,14 +499,7 @@ class CookieTests(unittest.TestCase):
     ## just the 7 special TLD's listed in their spec. And folks rely on
     ## that...
 
-    def test_domain_return_ok(self):
-        # test optimization: .domain_return_ok() should filter out most
-        # domains in the CookieJar before we try to access them (because that
-        # may require disk access -- in particular, with MSIECookieJar)
-        # This is only a rough check for performance reasons, so it's not too
-        # critical as long as it's sufficiently liberal.
-        pol = DefaultCookiePolicy()
-        for url, domain, ok in [
+    @support.subTests('url,domain,ok', [
             ("http://foo.bar.com/", "blah.com", False),
             ("http://foo.bar.com/", "rhubarb.blah.com", False),
             ("http://foo.bar.com/", "rhubarb.foo.bar.com", False),
@@ -535,11 +519,18 @@ def test_domain_return_ok(self):
             ("http://foo/", ".local", True),
             ("http://barfoo.com", ".foo.com", False),
             ("http://barfoo.com", "foo.com", False),
-            ]:
-            request = urllib.request.Request(url)
-            r = pol.domain_return_ok(domain, request)
-            if ok: self.assertTrue(r)
-            else: self.assertFalse(r)
+            ])
+    def test_domain_return_ok(self, url, domain, ok):
+        # test optimization: .domain_return_ok() should filter out most
+        # domains in the CookieJar before we try to access them (because that
+        # may require disk access -- in particular, with MSIECookieJar)
+        # This is only a rough check for performance reasons, so it's not too
+        # critical as long as it's sufficiently liberal.
+        pol = DefaultCookiePolicy()
+        request = urllib.request.Request(url)
+        r = pol.domain_return_ok(domain, request)
+        if ok: self.assertTrue(r)
+        else: self.assertFalse(r)
 
     def test_missing_value(self):
         # missing = sign in Cookie: header is regarded by Mozilla as a missing
@@ -573,10 +564,7 @@ def test_missing_value(self):
         self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
                          '"spam"; eggs')
 
-    def test_rfc2109_handling(self):
-        # RFC 2109 cookies are handled as RFC 2965 or Netscape cookies,
-        # dependent on policy settings
-        for rfc2109_as_netscape, rfc2965, version in [
+    @support.subTests('rfc2109_as_netscape,rfc2965,version', [
             # default according to rfc2965 if not explicitly specified
             (None, False, 0),
             (None, True, 1),
@@ -585,24 +573,27 @@ def test_rfc2109_handling(self):
             (False, True, 1),
             (True, False, 0),
             (True, True, 0),
-            ]:
-            policy = DefaultCookiePolicy(
-                rfc2109_as_netscape=rfc2109_as_netscape,
-                rfc2965=rfc2965)
-            c = CookieJar(policy)
-            interact_netscape(c, "http://www.example.com/", "ni=ni; Version=1")
-            try:
-                cookie = c._cookies["www.example.com"]["/"]["ni"]
-            except KeyError:
-                self.assertIsNone(version)  # didn't expect a stored cookie
-            else:
-                self.assertEqual(cookie.version, version)
-            # 2965 cookies are unaffected
-            interact_2965(c, "http://www.example.com/",
-                          "foo=bar; Version=1")
-            if rfc2965:
-                cookie2965 = c._cookies["www.example.com"]["/"]["foo"]
-                self.assertEqual(cookie2965.version, 1)
+            ])
+    def test_rfc2109_handling(self, rfc2109_as_netscape, rfc2965, version):
+        # RFC 2109 cookies are handled as RFC 2965 or Netscape cookies,
+        # dependent on policy settings
+        policy = DefaultCookiePolicy(
+            rfc2109_as_netscape=rfc2109_as_netscape,
+            rfc2965=rfc2965)
+        c = CookieJar(policy)
+        interact_netscape(c, "http://www.example.com/", "ni=ni; Version=1")
+        try:
+            cookie = c._cookies["www.example.com"]["/"]["ni"]
+        except KeyError:
+            self.assertIsNone(version)  # didn't expect a stored cookie
+        else:
+            self.assertEqual(cookie.version, version)
+        # 2965 cookies are unaffected
+        interact_2965(c, "http://www.example.com/",
+                      "foo=bar; Version=1")
+        if rfc2965:
+            cookie2965 = c._cookies["www.example.com"]["/"]["foo"]
+            self.assertEqual(cookie2965.version, 1)
 
     def test_ns_parser(self):
         c = CookieJar()
@@ -770,8 +761,7 @@ def test_default_path_with_query(self):
         # Cookie is sent back to the same URI.
         self.assertEqual(interact_netscape(cj, uri), value)
 
-    def test_escape_path(self):
-        cases = [
+    @support.subTests('arg,result', [
             # quoted safe
             ("/foo%2f/bar", "/foo%2F/bar"),
             ("/foo%2F/bar", "/foo%2F/bar"),
@@ -791,9 +781,9 @@ def test_escape_path(self):
             ("/foo/bar\u00fc", "/foo/bar%C3%BC"), # UTF-8 encoded
             # unicode
             ("/foo/bar\uabcd", "/foo/bar%EA%AF%8D"), # UTF-8 encoded
-            ]
-        for arg, result in cases:
-            self.assertEqual(escape_path(arg), result)
+            ])
+    def test_escape_path(self, arg, result):
+        self.assertEqual(escape_path(arg), result)
 
     def test_request_path(self):
         # with parameters
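
A note on the pattern this diff applies, for reviewers unfamiliar with it: `support.subTests` (from `test.support`) parameterizes a test method so that each listed value runs as its own subtest, replacing the hand-rolled `for` loops removed above and reporting every failing value instead of stopping at the first one. Below is a minimal sketch of the idea, assuming a CPython checkout where `test.support.subTests` is available (as on this branch); the class and test names are invented for illustration and are not part of this change.

```python
import unittest
from test import support
from http.cookiejar import http2time


class Http2TimeGarbageSketch(unittest.TestCase):
    # The decorator invokes the test once per listed value, binding it to the
    # named parameter; each invocation is reported as a separate subtest.
    @support.subTests('text', ['', 'Garbage', 'Mandag 16. September 1996'])
    def test_garbage_is_rejected(self, text):
        self.assertIsNone(http2time(text))


if __name__ == '__main__':
    unittest.main()
```

Because each value is its own subtest, a failure for one input no longer hides failures for the others and the failing value shows up in the report, which is why the hand-built failure messages in the old loops (for example the `"http2time(%s) is not None"` formatting) could be dropped.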