import unittest
import urllib.parse

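# Base URLs used by the urljoin()/urldefrag() tests below.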
RFC1808_BASE = "http://a/b/c/d;p?q#f"
RFC2396_BASE = "http://a/b/c/d;p?q"
RFC3986_BASE = 'http://a/b/c/d;p?q'
SIMPLE_BASE = 'http://a/b/c/d'

# Each parse_qsl testcase is a two-tuple that contains
# a string with the query and a list with the expected result.

parse_qsl_test_cases = [
    ("", []),
    ("&", []),
    ("&&", []),
    ("=", [('', '')]),
    ("=a", [('', 'a')]),
    ("a", [('a', '')]),
    ("a=", [('a', '')]),
    ("&a=b", [('a', 'b')]),
    ("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]),
    ("a=1&a=2", [('a', '1'), ('a', '2')]),
    (b"", []),
    (b"&", []),
    (b"&&", []),
    (b"=", [(b'', b'')]),
    (b"=a", [(b'', b'a')]),
    (b"a", [(b'a', b'')]),
    (b"a=", [(b'a', b'')]),
    (b"&a=b", [(b'a', b'b')]),
    (b"a=a+b&b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
    (b"a=1&a=2", [(b'a', b'1'), (b'a', b'2')]),
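    # The cases below use ';' as the pair separator.  They assume the
    # parse_qsl()/parse_qs() behavior current when these tests were written,
    # where ';' is accepted as an alternative separator to '&'.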
    (";", []),
    (";;", []),
    (";a=b", [('a', 'b')]),
    ("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
    ("a=1;a=2", [('a', '1'), ('a', '2')]),
    (b";", []),
    (b";;", []),
    (b";a=b", [(b'a', b'b')]),
    (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
    (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
]

# Each parse_qs testcase is a two-tuple that contains
# a string with the query and a dictionary with the expected result.

parse_qs_test_cases = [
    ("", {}),
    ("&", {}),
    ("&&", {}),
    ("=", {'': ['']}),
    ("=a", {'': ['a']}),
    ("a", {'a': ['']}),
    ("a=", {'a': ['']}),
    ("&a=b", {'a': ['b']}),
    ("a=a+b&b=b+c", {'a': ['a b'], 'b': ['b c']}),
    ("a=1&a=2", {'a': ['1', '2']}),
    (b"", {}),
    (b"&", {}),
    (b"&&", {}),
    (b"=", {b'': [b'']}),
    (b"=a", {b'': [b'a']}),
    (b"a", {b'a': [b'']}),
    (b"a=", {b'a': [b'']}),
    (b"&a=b", {b'a': [b'b']}),
    (b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
    (b"a=1&a=2", {b'a': [b'1', b'2']}),
    (";", {}),
    (";;", {}),
    (";a=b", {'a': ['b']}),
    ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
    ("a=1;a=2", {'a': ['1', '2']}),
    (b";", {}),
    (b";;", {}),
    (b";a=b", {b'a': [b'b']}),
    (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
    (b"a=1;a=2", {b'a': [b'1', b'2']}),
]

class UrlParseTestCase(unittest.TestCase):

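    # Helper: parse ``url`` with urlparse() and urlsplit(), compare the
    # results against the expected ``parsed`` and ``split`` tuples, and check
    # that unparsing and re-parsing geturl() reproduce the same result.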
    def checkRoundtrips(self, url, parsed, split):
        result = urllib.parse.urlparse(url)
        self.assertEqual(result, parsed)
        t = (result.scheme, result.netloc, result.path,
             result.params, result.query, result.fragment)
        self.assertEqual(t, parsed)
        # put it back together and it should be the same
        result2 = urllib.parse.urlunparse(result)
        self.assertEqual(result2, url)
        self.assertEqual(result2, result.geturl())

        # the result of geturl() is a fixpoint; we can always parse it
        # again to get the same result:
        result3 = urllib.parse.urlparse(result.geturl())
        self.assertEqual(result3.geturl(), result.geturl())
        self.assertEqual(result3, result)
        self.assertEqual(result3.scheme, result.scheme)
        self.assertEqual(result3.netloc, result.netloc)
        self.assertEqual(result3.path, result.path)
        self.assertEqual(result3.params, result.params)
        self.assertEqual(result3.query, result.query)
        self.assertEqual(result3.fragment, result.fragment)
        self.assertEqual(result3.username, result.username)
        self.assertEqual(result3.password, result.password)
        self.assertEqual(result3.hostname, result.hostname)
        self.assertEqual(result3.port, result.port)

        # check the roundtrip using urlsplit() as well
        result = urllib.parse.urlsplit(url)
        self.assertEqual(result, split)
        t = (result.scheme, result.netloc, result.path,
             result.query, result.fragment)
        self.assertEqual(t, split)
        result2 = urllib.parse.urlunsplit(result)
        self.assertEqual(result2, url)
        self.assertEqual(result2, result.geturl())

        # check the fixpoint property of re-parsing the result of geturl()
        result3 = urllib.parse.urlsplit(result.geturl())
        self.assertEqual(result3.geturl(), result.geturl())
        self.assertEqual(result3, result)
        self.assertEqual(result3.scheme, result.scheme)
        self.assertEqual(result3.netloc, result.netloc)
        self.assertEqual(result3.path, result.path)
        self.assertEqual(result3.query, result.query)
        self.assertEqual(result3.fragment, result.fragment)
        self.assertEqual(result3.username, result.username)
        self.assertEqual(result3.password, result.password)
        self.assertEqual(result3.hostname, result.hostname)
        self.assertEqual(result3.port, result.port)

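    # The expected results in parse_qsl_test_cases/parse_qs_test_cases assume
    # keep_blank_values=True; for the keep_blank_values=False checks the blank
    # values are filtered out of the expectations first.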
    def test_qsl(self):
        for orig, expect in parse_qsl_test_cases:
            result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
            self.assertEqual(result, expect, "Error parsing %r" % orig)
            expect_without_blanks = [v for v in expect if len(v[1])]
            result = urllib.parse.parse_qsl(orig, keep_blank_values=False)
            self.assertEqual(result, expect_without_blanks,
                             "Error parsing %r" % orig)

    def test_qs(self):
        for orig, expect in parse_qs_test_cases:
            result = urllib.parse.parse_qs(orig, keep_blank_values=True)
            self.assertEqual(result, expect, "Error parsing %r" % orig)
            expect_without_blanks = {v: expect[v]
                                     for v in expect if len(expect[v][0])}
            result = urllib.parse.parse_qs(orig, keep_blank_values=False)
            self.assertEqual(result, expect_without_blanks,
                             "Error parsing %r" % orig)

    def test_roundtrips(self):
        str_cases = [
            ('file:///tmp/junk.txt',
             ('file', '', '/tmp/junk.txt', '', '', ''),
             ('file', '', '/tmp/junk.txt', '', '')),
            ('imap://mail.python.org/mbox1',
             ('imap', 'mail.python.org', '/mbox1', '', '', ''),
             ('imap', 'mail.python.org', '/mbox1', '', '')),
            ('mms://wms.sys.hinet.net/cts/Drama/09006251100.asf',
             ('mms', 'wms.sys.hinet.net', '/cts/Drama/09006251100.asf',
              '', '', ''),
             ('mms', 'wms.sys.hinet.net', '/cts/Drama/09006251100.asf',
              '', '')),
            ('nfs://server/path/to/file.txt',
             ('nfs', 'server', '/path/to/file.txt', '', '', ''),
             ('nfs', 'server', '/path/to/file.txt', '', '')),
            ('svn+ssh://svn.zope.org/repos/main/ZConfig/trunk/',
             ('svn+ssh', 'svn.zope.org', '/repos/main/ZConfig/trunk/',
              '', '', ''),
             ('svn+ssh', 'svn.zope.org', '/repos/main/ZConfig/trunk/',
              '', '')),
            ('git+ssh://git@github.com/user/project.git',
             ('git+ssh', 'git@github.com', '/user/project.git',
              '', '', ''),
             ('git+ssh', 'git@github.com', '/user/project.git',
              '', '')),
            ]
        def _encode(t):
            return (t[0].encode('ascii'),
                    tuple(x.encode('ascii') for x in t[1]),
                    tuple(x.encode('ascii') for x in t[2]))
        bytes_cases = [_encode(x) for x in str_cases]
        for url, parsed, split in str_cases + bytes_cases:
            self.checkRoundtrips(url, parsed, split)

    def test_http_roundtrips(self):
        # urllib.parse.urlsplit treats 'http:' as an optimized special case,
        # so we test both 'http:' and 'https:' in all the following.
        # Three cheers for white box knowledge!
        str_cases = [
            ('://www.python.org',
             ('www.python.org', '', '', '', ''),
             ('www.python.org', '', '', '')),
            ('://www.python.org#abc',
             ('www.python.org', '', '', '', 'abc'),
             ('www.python.org', '', '', 'abc')),
            ('://www.python.org?q=abc',
             ('www.python.org', '', '', 'q=abc', ''),
             ('www.python.org', '', 'q=abc', '')),
            ('://www.python.org/#abc',
             ('www.python.org', '/', '', '', 'abc'),
             ('www.python.org', '/', '', 'abc')),
            ('://a/b/c/d;p?q#f',
             ('a', '/b/c/d', 'p', 'q', 'f'),
             ('a', '/b/c/d;p', 'q', 'f')),
            ]
        def _encode(t):
            return (t[0].encode('ascii'),
                    tuple(x.encode('ascii') for x in t[1]),
                    tuple(x.encode('ascii') for x in t[2]))
        bytes_cases = [_encode(x) for x in str_cases]
        str_schemes = ('http', 'https')
        bytes_schemes = (b'http', b'https')
        str_tests = str_schemes, str_cases
        bytes_tests = bytes_schemes, bytes_cases
        for schemes, test_cases in (str_tests, bytes_tests):
            for scheme in schemes:
                for url, parsed, split in test_cases:
                    url = scheme + url
                    parsed = (scheme,) + parsed
                    split = (scheme,) + split
                    self.checkRoundtrips(url, parsed, split)

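    # Helper: assert that urljoin(base, relurl) == expected, both for the str
    # arguments given and for their ASCII-encoded bytes counterparts.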
    def checkJoin(self, base, relurl, expected):
        str_components = (base, relurl, expected)
        self.assertEqual(urllib.parse.urljoin(base, relurl), expected)
        bytes_components = baseb, relurlb, expectedb = [
            x.encode('ascii') for x in str_components]
        self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb)

    def test_unparse_parse(self):
        str_cases = ['Python', './Python', 'x-newscheme://foo.com/stuff',
                     'x://y', 'x:/y', 'x:/', '/']
        bytes_cases = [x.encode('ascii') for x in str_cases]
        for u in str_cases + bytes_cases:
            self.assertEqual(urllib.parse.urlunsplit(urllib.parse.urlsplit(u)), u)
            self.assertEqual(urllib.parse.urlunparse(urllib.parse.urlparse(u)), u)

    def test_RFC1808(self):
        # "normal" cases from RFC 1808:
        self.checkJoin(RFC1808_BASE, 'g:h', 'g:h')
        self.checkJoin(RFC1808_BASE, 'g', 'http://a/b/c/g')
        self.checkJoin(RFC1808_BASE, './g', 'http://a/b/c/g')
        self.checkJoin(RFC1808_BASE, 'g/', 'http://a/b/c/g/')
        self.checkJoin(RFC1808_BASE, '/g', 'http://a/g')
        self.checkJoin(RFC1808_BASE, '//g', 'http://g')
        self.checkJoin(RFC1808_BASE, 'g?y', 'http://a/b/c/g?y')
        self.checkJoin(RFC1808_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin(RFC1808_BASE, '#s', 'http://a/b/c/d;p?q#s')
        self.checkJoin(RFC1808_BASE, 'g#s', 'http://a/b/c/g#s')
        self.checkJoin(RFC1808_BASE, 'g#s/./x', 'http://a/b/c/g#s/./x')
        self.checkJoin(RFC1808_BASE, 'g?y#s', 'http://a/b/c/g?y#s')
        self.checkJoin(RFC1808_BASE, 'g;x', 'http://a/b/c/g;x')
        self.checkJoin(RFC1808_BASE, 'g;x?y#s', 'http://a/b/c/g;x?y#s')
        self.checkJoin(RFC1808_BASE, '.', 'http://a/b/c/')
        self.checkJoin(RFC1808_BASE, './', 'http://a/b/c/')
        self.checkJoin(RFC1808_BASE, '..', 'http://a/b/')
        self.checkJoin(RFC1808_BASE, '../', 'http://a/b/')
        self.checkJoin(RFC1808_BASE, '../g', 'http://a/b/g')
        self.checkJoin(RFC1808_BASE, '../..', 'http://a/')
        self.checkJoin(RFC1808_BASE, '../../', 'http://a/')
        self.checkJoin(RFC1808_BASE, '../../g', 'http://a/g')

        # "abnormal" cases from RFC 1808:
        self.checkJoin(RFC1808_BASE, '', 'http://a/b/c/d;p?q#f')
        self.checkJoin(RFC1808_BASE, 'g.', 'http://a/b/c/g.')
        self.checkJoin(RFC1808_BASE, '.g', 'http://a/b/c/.g')
        self.checkJoin(RFC1808_BASE, 'g..', 'http://a/b/c/g..')
        self.checkJoin(RFC1808_BASE, '..g', 'http://a/b/c/..g')
        self.checkJoin(RFC1808_BASE, './../g', 'http://a/b/g')
        self.checkJoin(RFC1808_BASE, './g/.', 'http://a/b/c/g/')
        self.checkJoin(RFC1808_BASE, 'g/./h', 'http://a/b/c/g/h')
        self.checkJoin(RFC1808_BASE, 'g/../h', 'http://a/b/c/h')

        # RFC 1808 and RFC 1630 disagree on these (according to RFC 1808),
        # so we'll not actually run these tests (which expect 1808 behavior).
        #self.checkJoin(RFC1808_BASE, 'http:g', 'http:g')
        #self.checkJoin(RFC1808_BASE, 'http:', 'http:')

        # XXX: The following tests are no longer compatible with RFC3986
        # self.checkJoin(RFC1808_BASE, '../../../g', 'http://a/../g')
        # self.checkJoin(RFC1808_BASE, '../../../../g', 'http://a/../../g')
        # self.checkJoin(RFC1808_BASE, '/./g', 'http://a/./g')
        # self.checkJoin(RFC1808_BASE, '/../g', 'http://a/../g')

    def test_RFC2368(self):
        # Issue 11467: path that starts with a number is not parsed correctly
        self.assertEqual(urllib.parse.urlparse('mailto:1337@example.org'),
                         ('mailto', '', '1337@example.org', '', '', ''))

    def test_RFC2396(self):
        # cases from RFC 2396

        self.checkJoin(RFC2396_BASE, 'g:h', 'g:h')
        self.checkJoin(RFC2396_BASE, 'g', 'http://a/b/c/g')
        self.checkJoin(RFC2396_BASE, './g', 'http://a/b/c/g')
        self.checkJoin(RFC2396_BASE, 'g/', 'http://a/b/c/g/')
        self.checkJoin(RFC2396_BASE, '/g', 'http://a/g')
        self.checkJoin(RFC2396_BASE, '//g', 'http://g')
        self.checkJoin(RFC2396_BASE, 'g?y', 'http://a/b/c/g?y')
        self.checkJoin(RFC2396_BASE, '#s', 'http://a/b/c/d;p?q#s')
        self.checkJoin(RFC2396_BASE, 'g#s', 'http://a/b/c/g#s')
        self.checkJoin(RFC2396_BASE, 'g?y#s', 'http://a/b/c/g?y#s')
        self.checkJoin(RFC2396_BASE, 'g;x', 'http://a/b/c/g;x')
        self.checkJoin(RFC2396_BASE, 'g;x?y#s', 'http://a/b/c/g;x?y#s')
        self.checkJoin(RFC2396_BASE, '.', 'http://a/b/c/')
        self.checkJoin(RFC2396_BASE, './', 'http://a/b/c/')
        self.checkJoin(RFC2396_BASE, '..', 'http://a/b/')
        self.checkJoin(RFC2396_BASE, '../', 'http://a/b/')
        self.checkJoin(RFC2396_BASE, '../g', 'http://a/b/g')
        self.checkJoin(RFC2396_BASE, '../..', 'http://a/')
        self.checkJoin(RFC2396_BASE, '../../', 'http://a/')
        self.checkJoin(RFC2396_BASE, '../../g', 'http://a/g')
        self.checkJoin(RFC2396_BASE, '', RFC2396_BASE)
        self.checkJoin(RFC2396_BASE, 'g.', 'http://a/b/c/g.')
        self.checkJoin(RFC2396_BASE, '.g', 'http://a/b/c/.g')
        self.checkJoin(RFC2396_BASE, 'g..', 'http://a/b/c/g..')
        self.checkJoin(RFC2396_BASE, '..g', 'http://a/b/c/..g')
        self.checkJoin(RFC2396_BASE, './../g', 'http://a/b/g')
        self.checkJoin(RFC2396_BASE, './g/.', 'http://a/b/c/g/')
        self.checkJoin(RFC2396_BASE, 'g/./h', 'http://a/b/c/g/h')
        self.checkJoin(RFC2396_BASE, 'g/../h', 'http://a/b/c/h')
        self.checkJoin(RFC2396_BASE, 'g;x=1/./y', 'http://a/b/c/g;x=1/y')
        self.checkJoin(RFC2396_BASE, 'g;x=1/../y', 'http://a/b/c/y')
        self.checkJoin(RFC2396_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin(RFC2396_BASE, 'g?y/../x', 'http://a/b/c/g?y/../x')
        self.checkJoin(RFC2396_BASE, 'g#s/./x', 'http://a/b/c/g#s/./x')
        self.checkJoin(RFC2396_BASE, 'g#s/../x', 'http://a/b/c/g#s/../x')

        # XXX: The following tests are no longer compatible with RFC3986
        # self.checkJoin(RFC2396_BASE, '../../../g', 'http://a/../g')
        # self.checkJoin(RFC2396_BASE, '../../../../g', 'http://a/../../g')
        # self.checkJoin(RFC2396_BASE, '/./g', 'http://a/./g')
        # self.checkJoin(RFC2396_BASE, '/../g', 'http://a/../g')

    def test_RFC3986(self):
        self.checkJoin(RFC3986_BASE, '?y', 'http://a/b/c/d;p?y')
        self.checkJoin(RFC3986_BASE, ';x', 'http://a/b/c/;x')
        self.checkJoin(RFC3986_BASE, 'g:h', 'g:h')
        self.checkJoin(RFC3986_BASE, 'g', 'http://a/b/c/g')
        self.checkJoin(RFC3986_BASE, './g', 'http://a/b/c/g')
        self.checkJoin(RFC3986_BASE, 'g/', 'http://a/b/c/g/')
        self.checkJoin(RFC3986_BASE, '/g', 'http://a/g')
        self.checkJoin(RFC3986_BASE, '//g', 'http://g')
        self.checkJoin(RFC3986_BASE, '?y', 'http://a/b/c/d;p?y')
        self.checkJoin(RFC3986_BASE, 'g?y', 'http://a/b/c/g?y')
        self.checkJoin(RFC3986_BASE, '#s', 'http://a/b/c/d;p?q#s')
        self.checkJoin(RFC3986_BASE, 'g#s', 'http://a/b/c/g#s')
        self.checkJoin(RFC3986_BASE, 'g?y#s', 'http://a/b/c/g?y#s')
        self.checkJoin(RFC3986_BASE, ';x', 'http://a/b/c/;x')
        self.checkJoin(RFC3986_BASE, 'g;x', 'http://a/b/c/g;x')
        self.checkJoin(RFC3986_BASE, 'g;x?y#s', 'http://a/b/c/g;x?y#s')
        self.checkJoin(RFC3986_BASE, '', 'http://a/b/c/d;p?q')
        self.checkJoin(RFC3986_BASE, '.', 'http://a/b/c/')
        self.checkJoin(RFC3986_BASE, './', 'http://a/b/c/')
        self.checkJoin(RFC3986_BASE, '..', 'http://a/b/')
        self.checkJoin(RFC3986_BASE, '../', 'http://a/b/')
        self.checkJoin(RFC3986_BASE, '../g', 'http://a/b/g')
        self.checkJoin(RFC3986_BASE, '../..', 'http://a/')
        self.checkJoin(RFC3986_BASE, '../../', 'http://a/')
        self.checkJoin(RFC3986_BASE, '../../g', 'http://a/g')
        self.checkJoin(RFC3986_BASE, '../../../g', 'http://a/g')

        # Abnormal Examples

        # The 'abnormal' examples from RFC 3986 section 5.4.2.  Earlier RFCs
        # resolved several of these references differently; the expected
        # results below follow RFC 3986.

        self.checkJoin(RFC3986_BASE, '../../../g', 'http://a/g')
        self.checkJoin(RFC3986_BASE, '../../../../g', 'http://a/g')
        self.checkJoin(RFC3986_BASE, '/./g', 'http://a/g')
        self.checkJoin(RFC3986_BASE, '/../g', 'http://a/g')
        self.checkJoin(RFC3986_BASE, 'g.', 'http://a/b/c/g.')
        self.checkJoin(RFC3986_BASE, '.g', 'http://a/b/c/.g')
        self.checkJoin(RFC3986_BASE, 'g..', 'http://a/b/c/g..')
        self.checkJoin(RFC3986_BASE, '..g', 'http://a/b/c/..g')
        self.checkJoin(RFC3986_BASE, './../g', 'http://a/b/g')
        self.checkJoin(RFC3986_BASE, './g/.', 'http://a/b/c/g/')
        self.checkJoin(RFC3986_BASE, 'g/./h', 'http://a/b/c/g/h')
        self.checkJoin(RFC3986_BASE, 'g/../h', 'http://a/b/c/h')
        self.checkJoin(RFC3986_BASE, 'g;x=1/./y', 'http://a/b/c/g;x=1/y')
        self.checkJoin(RFC3986_BASE, 'g;x=1/../y', 'http://a/b/c/y')
        self.checkJoin(RFC3986_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin(RFC3986_BASE, 'g?y/../x', 'http://a/b/c/g?y/../x')
        self.checkJoin(RFC3986_BASE, 'g#s/./x', 'http://a/b/c/g#s/./x')
        self.checkJoin(RFC3986_BASE, 'g#s/../x', 'http://a/b/c/g#s/../x')
        #self.checkJoin(RFC3986_BASE, 'http:g', 'http:g')  # strict parser
        self.checkJoin(RFC3986_BASE, 'http:g', 'http://a/b/c/g')  # relaxed parser

        # Test for issue9721
        self.checkJoin('http://a/b/c/de', ';x', 'http://a/b/c/;x')

    def test_urljoins(self):
        self.checkJoin(SIMPLE_BASE, 'g:h', 'g:h')
        self.checkJoin(SIMPLE_BASE, 'http:g', 'http://a/b/c/g')
        self.checkJoin(SIMPLE_BASE, 'http:', 'http://a/b/c/d')
        self.checkJoin(SIMPLE_BASE, 'g', 'http://a/b/c/g')
        self.checkJoin(SIMPLE_BASE, './g', 'http://a/b/c/g')
        self.checkJoin(SIMPLE_BASE, 'g/', 'http://a/b/c/g/')
        self.checkJoin(SIMPLE_BASE, '/g', 'http://a/g')
        self.checkJoin(SIMPLE_BASE, '//g', 'http://g')
        self.checkJoin(SIMPLE_BASE, '?y', 'http://a/b/c/d?y')
        self.checkJoin(SIMPLE_BASE, 'g?y', 'http://a/b/c/g?y')
        self.checkJoin(SIMPLE_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin(SIMPLE_BASE, '.', 'http://a/b/c/')
        self.checkJoin(SIMPLE_BASE, './', 'http://a/b/c/')
        self.checkJoin(SIMPLE_BASE, '..', 'http://a/b/')
        self.checkJoin(SIMPLE_BASE, '../', 'http://a/b/')
        self.checkJoin(SIMPLE_BASE, '../g', 'http://a/b/g')
        self.checkJoin(SIMPLE_BASE, '../..', 'http://a/')
        self.checkJoin(SIMPLE_BASE, '../../g', 'http://a/g')
        self.checkJoin(SIMPLE_BASE, './../g', 'http://a/b/g')
        self.checkJoin(SIMPLE_BASE, './g/.', 'http://a/b/c/g/')
        self.checkJoin(SIMPLE_BASE, 'g/./h', 'http://a/b/c/g/h')
        self.checkJoin(SIMPLE_BASE, 'g/../h', 'http://a/b/c/h')
        self.checkJoin(SIMPLE_BASE, 'http:g', 'http://a/b/c/g')
        self.checkJoin(SIMPLE_BASE, 'http:', 'http://a/b/c/d')
        self.checkJoin(SIMPLE_BASE, 'http:?y', 'http://a/b/c/d?y')
        self.checkJoin(SIMPLE_BASE, 'http:g?y', 'http://a/b/c/g?y')
        self.checkJoin(SIMPLE_BASE, 'http:g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin('http:///', '..', 'http:///')
        self.checkJoin('', 'http://a/b/c/g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin('', 'http://a/./g', 'http://a/./g')
        self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2')
        self.checkJoin('svn+ssh://pathtorepo/dir1', 'dir2', 'svn+ssh://pathtorepo/dir2')
        self.checkJoin('ws://a/b', 'g', 'ws://a/g')
        self.checkJoin('wss://a/b', 'g', 'wss://a/g')

        # XXX: The following tests are no longer compatible with RFC3986
        # self.checkJoin(SIMPLE_BASE, '../../../g', 'http://a/../g')
        # self.checkJoin(SIMPLE_BASE, '/./g', 'http://a/./g')

        # test for issue22118 duplicate slashes
        self.checkJoin(SIMPLE_BASE + '/', 'foo', SIMPLE_BASE + '/foo')

        # Non-RFC-defined tests, covering variations of base and trailing
        # slashes
        self.checkJoin('http://a/b/c/d/e/', '../../f/g/', 'http://a/b/c/f/g/')
        self.checkJoin('http://a/b/c/d/e', '../../f/g/', 'http://a/b/f/g/')
        self.checkJoin('http://a/b/c/d/e/', '/../../f/g/', 'http://a/f/g/')
        self.checkJoin('http://a/b/c/d/e', '/../../f/g/', 'http://a/f/g/')
        self.checkJoin('http://a/b/c/d/e/', '../../f/g', 'http://a/b/c/f/g')
        self.checkJoin('http://a/b/', '../../f/g/', 'http://a/f/g/')

        # issue 23703: don't duplicate filename
        self.checkJoin('a', 'b', 'b')

    def test_RFC2732(self):
        str_cases = [
            ('http://Test.python.org:5432/foo/', 'test.python.org', 5432),
            ('http://12.34.56.78:5432/foo/', '12.34.56.78', 5432),
            ('http://[::1]:5432/foo/', '::1', 5432),
            ('http://[dead:beef::1]:5432/foo/', 'dead:beef::1', 5432),
            ('http://[dead:beef::]:5432/foo/', 'dead:beef::', 5432),
            ('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]:5432/foo/',
             'dead:beef:cafe:5417:affe:8fa3:deaf:feed', 5432),
            ('http://[::12.34.56.78]:5432/foo/', '::12.34.56.78', 5432),
            ('http://[::ffff:12.34.56.78]:5432/foo/',
             '::ffff:12.34.56.78', 5432),
            ('http://Test.python.org/foo/', 'test.python.org', None),
            ('http://12.34.56.78/foo/', '12.34.56.78', None),
            ('http://[::1]/foo/', '::1', None),
            ('http://[dead:beef::1]/foo/', 'dead:beef::1', None),
            ('http://[dead:beef::]/foo/', 'dead:beef::', None),
            ('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/',
             'dead:beef:cafe:5417:affe:8fa3:deaf:feed', None),
            ('http://[::12.34.56.78]/foo/', '::12.34.56.78', None),
            ('http://[::ffff:12.34.56.78]/foo/',
             '::ffff:12.34.56.78', None),
            ('http://Test.python.org:/foo/', 'test.python.org', None),
            ('http://12.34.56.78:/foo/', '12.34.56.78', None),
            ('http://[::1]:/foo/', '::1', None),
            ('http://[dead:beef::1]:/foo/', 'dead:beef::1', None),
            ('http://[dead:beef::]:/foo/', 'dead:beef::', None),
            ('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]:/foo/',
             'dead:beef:cafe:5417:affe:8fa3:deaf:feed', None),
            ('http://[::12.34.56.78]:/foo/', '::12.34.56.78', None),
            ('http://[::ffff:12.34.56.78]:/foo/',
             '::ffff:12.34.56.78', None),
            ]
        def _encode(t):
            return t[0].encode('ascii'), t[1].encode('ascii'), t[2]
        bytes_cases = [_encode(x) for x in str_cases]
        for url, hostname, port in str_cases + bytes_cases:
            urlparsed = urllib.parse.urlparse(url)
            self.assertEqual((urlparsed.hostname, urlparsed.port), (hostname, port))

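        # Netlocs with unbalanced IPv6 brackets are invalid and must raise
        # ValueError when parsed.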
        str_cases = [
            'http://::12.34.56.78]/',
            'http://[::1/foo/',
            'ftp://[::1/foo/bad]/bad',
            'http://[::1/foo/bad]/bad',
            'http://[::ffff:12.34.56.78']
        bytes_cases = [x.encode('ascii') for x in str_cases]
        for invalid_url in str_cases + bytes_cases:
            self.assertRaises(ValueError, urllib.parse.urlparse, invalid_url)

    def test_urldefrag(self):
        str_cases = [
            ('http://python.org#frag', 'http://python.org', 'frag'),
            ('http://python.org', 'http://python.org', ''),
            ('http://python.org/#frag', 'http://python.org/', 'frag'),
            ('http://python.org/', 'http://python.org/', ''),
            ('http://python.org/?q#frag', 'http://python.org/?q', 'frag'),
            ('http://python.org/?q', 'http://python.org/?q', ''),
            ('http://python.org/p#frag', 'http://python.org/p', 'frag'),
            ('http://python.org/p?q', 'http://python.org/p?q', ''),
            (RFC1808_BASE, 'http://a/b/c/d;p?q', 'f'),
            (RFC2396_BASE, 'http://a/b/c/d;p?q', ''),
            ]
        def _encode(t):
            return type(t)(x.encode('ascii') for x in t)
        bytes_cases = [_encode(x) for x in str_cases]
        for url, defrag, frag in str_cases + bytes_cases:
            result = urllib.parse.urldefrag(url)
            self.assertEqual(result.geturl(), url)
            self.assertEqual(result, (defrag, frag))
            self.assertEqual(result.url, defrag)
            self.assertEqual(result.fragment, frag)

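    # Scoped IPv6 addresses include a zone identifier after '%'; urlsplit()
    # keeps the zone id as given, and .hostname lowercases only the address
    # part.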
    def test_urlsplit_scoped_IPv6(self):
        p = urllib.parse.urlsplit('http://[FE80::822a:a8ff:fe49:470c%tESt]:1234')
        self.assertEqual(p.hostname, "fe80::822a:a8ff:fe49:470c%tESt")
        self.assertEqual(p.netloc, '[FE80::822a:a8ff:fe49:470c%tESt]:1234')

        p = urllib.parse.urlsplit(b'http://[FE80::822a:a8ff:fe49:470c%tESt]:1234')
        self.assertEqual(p.hostname, b"fe80::822a:a8ff:fe49:470c%tESt")
        self.assertEqual(p.netloc, b'[FE80::822a:a8ff:fe49:470c%tESt]:1234')

    def test_urlsplit_attributes(self):
        url = "HTTP://WWW.PYTHON.ORG/doc/#frag"
        p = urllib.parse.urlsplit(url)
        self.assertEqual(p.scheme, "http")
        self.assertEqual(p.netloc, "WWW.PYTHON.ORG")
        self.assertEqual(p.path, "/doc/")
        self.assertEqual(p.query, "")
        self.assertEqual(p.fragment, "frag")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, "www.python.org")
        self.assertEqual(p.port, None)
        # geturl() won't return exactly the original URL in this case
        # since the scheme is always case-normalized.
        # We handle this by ignoring the first 4 characters of the URL.
        self.assertEqual(p.geturl()[4:], url[4:])

        url = "http://User:Pass@www.python.org:080/doc/?query=yes#frag"
        p = urllib.parse.urlsplit(url)
        self.assertEqual(p.scheme, "http")
        self.assertEqual(p.netloc, "User:Pass@www.python.org:080")
        self.assertEqual(p.path, "/doc/")
        self.assertEqual(p.query, "query=yes")
        self.assertEqual(p.fragment, "frag")
        self.assertEqual(p.username, "User")
        self.assertEqual(p.password, "Pass")
        self.assertEqual(p.hostname, "www.python.org")
        self.assertEqual(p.port, 80)
        self.assertEqual(p.geturl(), url)

        # Addressing issue1698, which suggests usernames can contain
        # "@" characters.  Though not RFC compliant, many ftp sites allow
        # and request email addresses as usernames.

        url = "http://User@example.com:Pass@www.python.org:080/doc/?query=yes#frag"
        p = urllib.parse.urlsplit(url)
        self.assertEqual(p.scheme, "http")
        self.assertEqual(p.netloc, "User@example.com:Pass@www.python.org:080")
        self.assertEqual(p.path, "/doc/")
        self.assertEqual(p.query, "query=yes")
        self.assertEqual(p.fragment, "frag")
        self.assertEqual(p.username, "User@example.com")
        self.assertEqual(p.password, "Pass")
        self.assertEqual(p.hostname, "www.python.org")
        self.assertEqual(p.port, 80)
        self.assertEqual(p.geturl(), url)

        # And check them all again, only with bytes this time
        url = b"HTTP://WWW.PYTHON.ORG/doc/#frag"
        p = urllib.parse.urlsplit(url)
        self.assertEqual(p.scheme, b"http")
        self.assertEqual(p.netloc, b"WWW.PYTHON.ORG")
        self.assertEqual(p.path, b"/doc/")
        self.assertEqual(p.query, b"")
        self.assertEqual(p.fragment, b"frag")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, b"www.python.org")
        self.assertEqual(p.port, None)
        self.assertEqual(p.geturl()[4:], url[4:])

        url = b"http://User:Pass@www.python.org:080/doc/?query=yes#frag"
        p = urllib.parse.urlsplit(url)
        self.assertEqual(p.scheme, b"http")
        self.assertEqual(p.netloc, b"User:Pass@www.python.org:080")
        self.assertEqual(p.path, b"/doc/")
        self.assertEqual(p.query, b"query=yes")
        self.assertEqual(p.fragment, b"frag")
        self.assertEqual(p.username, b"User")
        self.assertEqual(p.password, b"Pass")
        self.assertEqual(p.hostname, b"www.python.org")
        self.assertEqual(p.port, 80)
        self.assertEqual(p.geturl(), url)

        url = b"http://User@example.com:Pass@www.python.org:080/doc/?query=yes#frag"
        p = urllib.parse.urlsplit(url)
        self.assertEqual(p.scheme, b"http")
        self.assertEqual(p.netloc, b"User@example.com:Pass@www.python.org:080")
        self.assertEqual(p.path, b"/doc/")
        self.assertEqual(p.query, b"query=yes")
        self.assertEqual(p.fragment, b"frag")
        self.assertEqual(p.username, b"User@example.com")
        self.assertEqual(p.password, b"Pass")
        self.assertEqual(p.hostname, b"www.python.org")
        self.assertEqual(p.port, 80)
        self.assertEqual(p.geturl(), url)

        # Verify an illegal port raises ValueError
        url = b"HTTP://WWW.PYTHON.ORG:65536/doc/#frag"
        p = urllib.parse.urlsplit(url)
        with self.assertRaisesRegex(ValueError, "out of range"):
            p.port

    def test_attributes_bad_port(self):
        """Check handling of invalid ports."""
        for bytes in (False, True):
            for parse in (urllib.parse.urlsplit, urllib.parse.urlparse):
                for port in ("foo", "1.5", "-1", "0x10"):
                    with self.subTest(bytes=bytes, parse=parse, port=port):
                        netloc = "www.example.net:" + port
                        url = "http://" + netloc
                        if bytes:
                            netloc = netloc.encode("ascii")
                            url = url.encode("ascii")
                        p = parse(url)
                        self.assertEqual(p.netloc, netloc)
                        with self.assertRaises(ValueError):
                            p.port

    def test_attributes_without_netloc(self):
        # This example is straight from RFC 3261.  It looks like it
        # should allow the username, hostname, and port to be filled
        # in, but doesn't.  Since it's a URI and doesn't use the
        # scheme://netloc syntax, the netloc and related attributes
        # should be left empty.
        uri = "sip:alice@atlanta.com;maddr=239.255.255.1;ttl=15"
        p = urllib.parse.urlsplit(uri)
        self.assertEqual(p.netloc, "")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, None)
        self.assertEqual(p.port, None)
        self.assertEqual(p.geturl(), uri)

        p = urllib.parse.urlparse(uri)
        self.assertEqual(p.netloc, "")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, None)
        self.assertEqual(p.port, None)
        self.assertEqual(p.geturl(), uri)

        # You guessed it, repeating the test with bytes input
        uri = b"sip:alice@atlanta.com;maddr=239.255.255.1;ttl=15"
        p = urllib.parse.urlsplit(uri)
        self.assertEqual(p.netloc, b"")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, None)
        self.assertEqual(p.port, None)
        self.assertEqual(p.geturl(), uri)

        p = urllib.parse.urlparse(uri)
        self.assertEqual(p.netloc, b"")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, None)
        self.assertEqual(p.port, None)
        self.assertEqual(p.geturl(), uri)

    def test_noslash(self):
        # Issue 1637: http://foo.com?query is legal
        self.assertEqual(urllib.parse.urlparse("http://example.com?blahblah=/foo"),
                         ('http', 'example.com', '', '', 'blahblah=/foo', ''))
        self.assertEqual(urllib.parse.urlparse(b"http://example.com?blahblah=/foo"),
                         (b'http', b'example.com', b'', b'', b'blahblah=/foo', b''))

    def test_withoutscheme(self):
        # Test urlparse without a scheme.
        # Issue 754016: urlparse goes wrong with IP:port without a scheme.
        # RFC 1808 specifies that a netloc must start with '//'; urlparse
        # expects the same, otherwise it classifies that portion of the URL
        # as a path.
        self.assertEqual(urllib.parse.urlparse("path"),
                         ('', '', 'path', '', '', ''))
        self.assertEqual(urllib.parse.urlparse("//www.python.org:80"),
                         ('', 'www.python.org:80', '', '', '', ''))
        self.assertEqual(urllib.parse.urlparse("http://www.python.org:80"),
                         ('http', 'www.python.org:80', '', '', '', ''))
        # Repeat for bytes input
        self.assertEqual(urllib.parse.urlparse(b"path"),
                         (b'', b'', b'path', b'', b'', b''))
        self.assertEqual(urllib.parse.urlparse(b"//www.python.org:80"),
                         (b'', b'www.python.org:80', b'', b'', b'', b''))
        self.assertEqual(urllib.parse.urlparse(b"http://www.python.org:80"),
                         (b'http', b'www.python.org:80', b'', b'', b'', b''))

    def test_portseparator(self):
        # Issue 754016: distinguish the port separator ':' from the scheme
        # separator ':'
        self.assertEqual(urllib.parse.urlparse("path:80"),
                         ('', '', 'path:80', '', '', ''))
        self.assertEqual(urllib.parse.urlparse("http:"), ('http', '', '', '', '', ''))
        self.assertEqual(urllib.parse.urlparse("https:"), ('https', '', '', '', '', ''))
        self.assertEqual(urllib.parse.urlparse("http://www.python.org:80"),
                         ('http', 'www.python.org:80', '', '', '', ''))
        # As usual, need to check bytes input as well
        self.assertEqual(urllib.parse.urlparse(b"path:80"),
                         (b'', b'', b'path:80', b'', b'', b''))
        self.assertEqual(urllib.parse.urlparse(b"http:"), (b'http', b'', b'', b'', b'', b''))
        self.assertEqual(urllib.parse.urlparse(b"https:"), (b'https', b'', b'', b'', b'', b''))
        self.assertEqual(urllib.parse.urlparse(b"http://www.python.org:80"),
                         (b'http', b'www.python.org:80', b'', b'', b'', b''))

    def test_usingsys(self):
        # Issue 3314: sys module is used in the error
        self.assertRaises(TypeError, urllib.parse.urlencode, "foo")

    def test_anyscheme(self):
        # Issue 7904: s3://foo.com/stuff has netloc "foo.com".
        self.assertEqual(urllib.parse.urlparse("s3://foo.com/stuff"),
                         ('s3', 'foo.com', '/stuff', '', '', ''))
        self.assertEqual(urllib.parse.urlparse("x-newscheme://foo.com/stuff"),
                         ('x-newscheme', 'foo.com', '/stuff', '', '', ''))
        self.assertEqual(urllib.parse.urlparse("x-newscheme://foo.com/stuff?query#fragment"),
                         ('x-newscheme', 'foo.com', '/stuff', '', 'query', 'fragment'))
        self.assertEqual(urllib.parse.urlparse("x-newscheme://foo.com/stuff?query"),
                         ('x-newscheme', 'foo.com', '/stuff', '', 'query', ''))

        # And for bytes...
        self.assertEqual(urllib.parse.urlparse(b"s3://foo.com/stuff"),
                         (b's3', b'foo.com', b'/stuff', b'', b'', b''))
        self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff"),
                         (b'x-newscheme', b'foo.com', b'/stuff', b'', b'', b''))
        self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff?query#fragment"),
                         (b'x-newscheme', b'foo.com', b'/stuff', b'', b'query', b'fragment'))
        self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff?query"),
                         (b'x-newscheme', b'foo.com', b'/stuff', b'', b'query', b''))

    def test_default_scheme(self):
        # Exercise the scheme parameter of urlparse() and urlsplit()
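        # The scheme argument only supplies a default: it is used when the
        # URL itself carries no scheme, and ignored otherwise.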
        for func in (urllib.parse.urlparse, urllib.parse.urlsplit):
            with self.subTest(function=func):
                result = func("http://example.net/", "ftp")
                self.assertEqual(result.scheme, "http")
                result = func(b"http://example.net/", b"ftp")
                self.assertEqual(result.scheme, b"http")
                self.assertEqual(func("path", "ftp").scheme, "ftp")
                self.assertEqual(func("path", scheme="ftp").scheme, "ftp")
                self.assertEqual(func(b"path", scheme=b"ftp").scheme, b"ftp")
                self.assertEqual(func("path").scheme, "")
                self.assertEqual(func(b"path").scheme, b"")
                self.assertEqual(func(b"path", "").scheme, b"")

    def test_parse_fragments(self):
        # Exercise the allow_fragments parameter of urlparse() and urlsplit()
        tests = (
            ("http:#frag", "path", "frag"),
            ("//example.net#frag", "path", "frag"),
            ("index.html#frag", "path", "frag"),
            (";a=b#frag", "params", "frag"),
            ("?a=b#frag", "query", "frag"),
            ("#frag", "path", "frag"),
            ("abc#@frag", "path", "@frag"),
            ("//abc#@frag", "path", "@frag"),
            ("//abc:80#@frag", "path", "@frag"),
            ("//abc#@frag:80", "path", "@frag:80"),
        )
        for url, attr, expected_frag in tests:
            for func in (urllib.parse.urlparse, urllib.parse.urlsplit):
                if attr == "params" and func is urllib.parse.urlsplit:
                    attr = "path"
                with self.subTest(url=url, function=func):
                    result = func(url, allow_fragments=False)
                    self.assertEqual(result.fragment, "")
                    self.assertTrue(
                        getattr(result, attr).endswith("#" + expected_frag))
                    self.assertEqual(func(url, "", False).fragment, "")

                    result = func(url, allow_fragments=True)
                    self.assertEqual(result.fragment, expected_frag)
                    self.assertFalse(
                        getattr(result, attr).endswith(expected_frag))
                    self.assertEqual(func(url, "", True).fragment,
                                     expected_frag)
                    self.assertEqual(func(url).fragment, expected_frag)

    def test_mixed_types_rejected(self):
        # Several functions that process either strings or ASCII encoded bytes
        # accept multiple arguments.  Check they reject mixed type input.
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlparse("www.python.org", b"http")
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlparse(b"www.python.org", "http")
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlsplit("www.python.org", b"http")
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlsplit(b"www.python.org", "http")
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlunparse((b"http", "www.python.org", "", "", "", ""))
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlunparse(("http", b"www.python.org", "", "", "", ""))
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlunsplit((b"http", "www.python.org", "", "", ""))
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urlunsplit(("http", b"www.python.org", "", "", ""))
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urljoin("http://python.org", b"http://python.org")
        with self.assertRaisesRegex(TypeError, "Cannot mix str"):
            urllib.parse.urljoin(b"http://python.org", "http://python.org")

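    # Helper: check that a str-based result type and its bytes counterpart
    # convert to each other via the result objects' encode()/decode() methods.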
    def _check_result_type(self, str_type):
        num_args = len(str_type._fields)
        bytes_type = str_type._encoded_counterpart
        self.assertIs(bytes_type._decoded_counterpart, str_type)
        str_args = ('',) * num_args
        bytes_args = (b'',) * num_args
        str_result = str_type(*str_args)
        bytes_result = bytes_type(*bytes_args)
        encoding = 'ascii'
        errors = 'strict'
        self.assertEqual(str_result, str_args)
        self.assertEqual(bytes_result.decode(), str_args)
        self.assertEqual(bytes_result.decode(), str_result)
        self.assertEqual(bytes_result.decode(encoding), str_args)
        self.assertEqual(bytes_result.decode(encoding), str_result)
        self.assertEqual(bytes_result.decode(encoding, errors), str_args)
        self.assertEqual(bytes_result.decode(encoding, errors), str_result)
        self.assertEqual(bytes_result, bytes_args)
        self.assertEqual(str_result.encode(), bytes_args)
        self.assertEqual(str_result.encode(), bytes_result)
        self.assertEqual(str_result.encode(encoding), bytes_args)
        self.assertEqual(str_result.encode(encoding), bytes_result)
        self.assertEqual(str_result.encode(encoding, errors), bytes_args)
        self.assertEqual(str_result.encode(encoding, errors), bytes_result)

| 846 | def test_result_pairs(self): |
| 847 | # Check encoding and decoding between result pairs |
| 848 | result_types = [ |
| 849 | urllib.parse.DefragResult, |
| 850 | urllib.parse.SplitResult, |
| 851 | urllib.parse.ParseResult, |
| 852 | ] |
| 853 | for result_type in result_types: |
| 854 | self._check_result_type(result_type) |
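        # Illustrative sketch (not part of the original checks): a concrete
        # encode()/decode() round trip on an arbitrary ASCII-only URL.
        split = urllib.parse.urlsplit('http://www.python.org/path')
        self.assertEqual(split.encode().decode(), split)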
| 855 | |
Victor Stinner | 1d87deb | 2011-01-14 13:05:19 +0000 | [diff] [blame] | 856 | def test_parse_qs_encoding(self): |
| 857 | result = urllib.parse.parse_qs("key=\u0141%E9", encoding="latin-1") |
| 858 | self.assertEqual(result, {'key': ['\u0141\xE9']}) |
| 859 | result = urllib.parse.parse_qs("key=\u0141%C3%A9", encoding="utf-8") |
| 860 | self.assertEqual(result, {'key': ['\u0141\xE9']}) |
| 861 | result = urllib.parse.parse_qs("key=\u0141%C3%A9", encoding="ascii") |
| 862 | self.assertEqual(result, {'key': ['\u0141\ufffd\ufffd']}) |
| 863 | result = urllib.parse.parse_qs("key=\u0141%E9-", encoding="ascii") |
| 864 | self.assertEqual(result, {'key': ['\u0141\ufffd-']}) |
| 865 | result = urllib.parse.parse_qs("key=\u0141%E9-", encoding="ascii", |
| 866 | errors="ignore") |
| 867 | self.assertEqual(result, {'key': ['\u0141-']}) |
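        # Illustrative sketch (not part of the original checks): quoting a
        # value and parsing it back with matching encodings round-trips it.
        encoded = urllib.parse.quote('\u0141\xe9', encoding='utf-8')
        result = urllib.parse.parse_qs('key=' + encoded, encoding='utf-8')
        self.assertEqual(result, {'key': ['\u0141\xe9']})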
| 868 | |
| 869 | def test_parse_qsl_encoding(self): |
| 870 | result = urllib.parse.parse_qsl("key=\u0141%E9", encoding="latin-1") |
| 871 | self.assertEqual(result, [('key', '\u0141\xE9')]) |
| 872 | result = urllib.parse.parse_qsl("key=\u0141%C3%A9", encoding="utf-8") |
| 873 | self.assertEqual(result, [('key', '\u0141\xE9')]) |
| 874 | result = urllib.parse.parse_qsl("key=\u0141%C3%A9", encoding="ascii") |
| 875 | self.assertEqual(result, [('key', '\u0141\ufffd\ufffd')]) |
| 876 | result = urllib.parse.parse_qsl("key=\u0141%E9-", encoding="ascii") |
| 877 | self.assertEqual(result, [('key', '\u0141\ufffd-')]) |
| 878 | result = urllib.parse.parse_qsl("key=\u0141%E9-", encoding="ascii", |
| 879 | errors="ignore") |
| 880 | self.assertEqual(result, [('key', '\u0141-')]) |
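        # Illustrative sketch (not part of the original checks): the per-value
        # decoding behaves like unquote() with the same encoding/errors.
        self.assertEqual(
            urllib.parse.unquote("%E9-", encoding="ascii", errors="ignore"),
            "-")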
| 881 | |
Senthil Kumaran | de02a71 | 2011-07-23 18:27:45 +0800 | [diff] [blame] | 882 | def test_urlencode_sequences(self): |
| 883 | # Other tests incidentally urlencode things; test non-covered cases: |
| 884 | # Sequence and object values. |
| 885 | result = urllib.parse.urlencode({'a': [1, 2], 'b': (3, 4, 5)}, True) |
Georg Brandl | 09a7c72 | 2012-02-20 21:31:46 +0100 | [diff] [blame] | 886 | # we cannot rely on ordering here |
| 887 | self.assertEqual(set(result.split('&')), {'a=1', 'a=2', 'b=3', 'b=4', 'b=5'})
Senthil Kumaran | de02a71 | 2011-07-23 18:27:45 +0800 | [diff] [blame] | 888 | |
| 889 | class Trivial: |
| 890 | def __str__(self): |
| 891 | return 'trivial' |
| 892 | |
| 893 | result = urllib.parse.urlencode({'a': Trivial()}, True) |
| 894 | self.assertEqual(result, 'a=trivial') |
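        # Illustrative sketch (not part of the original checks): the same
        # sequence handling via the doseq keyword, with ordered input.
        result = urllib.parse.urlencode([('a', [1, 2])], doseq=True)
        self.assertEqual(result, 'a=1&a=2')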
| 895 | |
R David Murray | c17686f | 2015-05-17 20:44:50 -0400 | [diff] [blame] | 896 | def test_urlencode_quote_via(self): |
| 897 | result = urllib.parse.urlencode({'a': 'some value'}) |
| 898 | self.assertEqual(result, "a=some+value") |
| 899 | result = urllib.parse.urlencode({'a': 'some value/another'}, |
| 900 | quote_via=urllib.parse.quote) |
| 901 | self.assertEqual(result, "a=some%20value%2Fanother") |
| 902 | result = urllib.parse.urlencode({'a': 'some value/another'}, |
| 903 | safe='/', quote_via=urllib.parse.quote) |
| 904 | self.assertEqual(result, "a=some%20value/another") |
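        # Illustrative sketch (not part of the original checks): passing
        # quote_plus explicitly reproduces the default behavior.
        result = urllib.parse.urlencode({'a': 'some value'},
                                        quote_via=urllib.parse.quote_plus)
        self.assertEqual(result, "a=some+value")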
| 905 | |
Senthil Kumaran | de02a71 | 2011-07-23 18:27:45 +0800 | [diff] [blame] | 906 | def test_quote_from_bytes(self): |
| 907 | self.assertRaises(TypeError, urllib.parse.quote_from_bytes, 'foo') |
| 908 | result = urllib.parse.quote_from_bytes(b'archaeological arcana') |
| 909 | self.assertEqual(result, 'archaeological%20arcana') |
| 910 | result = urllib.parse.quote_from_bytes(b'') |
| 911 | self.assertEqual(result, '') |
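        # Illustrative sketch (not part of the original checks): non-ASCII
        # bytes are percent-encoded individually with uppercase hex digits.
        result = urllib.parse.quote_from_bytes(b'\xe9')
        self.assertEqual(result, '%E9')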
| 912 | |
| 913 | def test_unquote_to_bytes(self): |
| 914 | result = urllib.parse.unquote_to_bytes('abc%20def') |
| 915 | self.assertEqual(result, b'abc def') |
| 916 | result = urllib.parse.unquote_to_bytes('') |
| 917 | self.assertEqual(result, b'') |
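        # Illustrative sketch (not part of the original checks): escapes
        # decode to raw bytes, so non-UTF-8 byte values survive unchanged.
        result = urllib.parse.unquote_to_bytes('abc%E9')
        self.assertEqual(result, b'abc\xe9')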
| 918 | |
| 919 | def test_quote_errors(self): |
| 920 | self.assertRaises(TypeError, urllib.parse.quote, b'foo', |
| 921 | encoding='utf-8') |
| 922 | self.assertRaises(TypeError, urllib.parse.quote, b'foo', errors='strict') |
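        # Illustrative sketch (not part of the original checks): str input,
        # by contrast, accepts encoding/errors arguments.
        self.assertEqual(urllib.parse.quote('\xe9', encoding='latin-1'), '%E9')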
Victor Stinner | 1d87deb | 2011-01-14 13:05:19 +0000 | [diff] [blame] | 923 | |
Ezio Melotti | 6709b7d | 2012-05-19 17:15:19 +0300 | [diff] [blame] | 924 | def test_issue14072(self): |
| 925 | p1 = urllib.parse.urlsplit('tel:+31-641044153') |
| 926 | self.assertEqual(p1.scheme, 'tel') |
| 927 | self.assertEqual(p1.path, '+31-641044153') |
| 928 | p2 = urllib.parse.urlsplit('tel:+31641044153') |
| 929 | self.assertEqual(p2.scheme, 'tel') |
| 930 | self.assertEqual(p2.path, '+31641044153') |
Senthil Kumaran | ed30199 | 2012-12-24 14:00:20 -0800 | [diff] [blame] | 931 | # assert the behavior for urlparse |
| 932 | p1 = urllib.parse.urlparse('tel:+31-641044153') |
| 933 | self.assertEqual(p1.scheme, 'tel') |
| 934 | self.assertEqual(p1.path, '+31-641044153') |
| 935 | p2 = urllib.parse.urlparse('tel:+31641044153') |
| 936 | self.assertEqual(p2.scheme, 'tel') |
| 937 | self.assertEqual(p2.path, '+31641044153') |
| 938 | |
| 939 | def test_telurl_params(self): |
| 940 | p1 = urllib.parse.urlparse('tel:123-4;phone-context=+1-650-516') |
| 941 | self.assertEqual(p1.scheme, 'tel') |
| 942 | self.assertEqual(p1.path, '123-4') |
| 943 | self.assertEqual(p1.params, 'phone-context=+1-650-516') |
| 944 | |
| 945 | p1 = urllib.parse.urlparse('tel:+1-201-555-0123') |
| 946 | self.assertEqual(p1.scheme, 'tel') |
| 947 | self.assertEqual(p1.path, '+1-201-555-0123') |
| 948 | self.assertEqual(p1.params, '') |
| 949 | |
| 950 | p1 = urllib.parse.urlparse('tel:7042;phone-context=example.com') |
| 951 | self.assertEqual(p1.scheme, 'tel') |
| 952 | self.assertEqual(p1.path, '7042') |
| 953 | self.assertEqual(p1.params, 'phone-context=example.com') |
| 954 | |
| 955 | p1 = urllib.parse.urlparse('tel:863-1234;phone-context=+1-914-555') |
| 956 | self.assertEqual(p1.scheme, 'tel') |
| 957 | self.assertEqual(p1.path, '863-1234') |
| 958 | self.assertEqual(p1.params, 'phone-context=+1-914-555') |
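        # Illustrative sketch (not part of the original checks): urlsplit(),
        # by contrast, leaves the ;parameters inside the path component.
        p2 = urllib.parse.urlsplit('tel:7042;phone-context=example.com')
        self.assertEqual(p2.path, '7042;phone-context=example.com')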
| 959 | |
R David Murray | f516388 | 2013-03-21 20:56:51 -0400 | [diff] [blame] | 960 | def test_Quoter_repr(self): |
| 961 | quoter = urllib.parse.Quoter(urllib.parse._ALWAYS_SAFE) |
| 962 | self.assertIn('Quoter', repr(quoter)) |
| 963 | |
Serhiy Storchaka | 1515450 | 2015-04-07 19:09:01 +0300 | [diff] [blame] | 964 | def test_all(self): |
| 965 | expected = [] |
| 966 | undocumented = { |
| 967 | 'splitattr', 'splithost', 'splitnport', 'splitpasswd', |
| 968 | 'splitport', 'splitquery', 'splittag', 'splittype', 'splituser', |
| 969 | 'splitvalue', |
| 970 | 'Quoter', 'ResultBase', 'clear_cache', 'to_bytes', 'unwrap', |
| 971 | } |
| 972 | for name in dir(urllib.parse): |
| 973 | if name.startswith('_') or name in undocumented: |
| 974 | continue |
| 975 | object = getattr(urllib.parse, name) |
| 976 | if getattr(object, '__module__', None) == 'urllib.parse': |
| 977 | expected.append(name) |
| 978 | self.assertCountEqual(urllib.parse.__all__, expected) |
| 979 | |
Senthil Kumaran | 6be85c5 | 2010-02-19 07:42:50 +0000 | [diff] [blame] | 980 | |
Serhiy Storchaka | 9270be7 | 2015-03-02 16:32:29 +0200 | [diff] [blame] | 981 | class Utility_Tests(unittest.TestCase): |
| 982 | """Tests for the various utility functions in urllib.parse."""
| 983 | # In Python 2 this test class was in test_urllib. |
| 984 | |
| 985 | def test_splittype(self): |
| 986 | splittype = urllib.parse.splittype |
| 987 | self.assertEqual(splittype('type:opaquestring'), ('type', 'opaquestring')) |
| 988 | self.assertEqual(splittype('opaquestring'), (None, 'opaquestring')) |
| 989 | self.assertEqual(splittype(':opaquestring'), (None, ':opaquestring')) |
| 990 | self.assertEqual(splittype('type:'), ('type', '')) |
| 991 | self.assertEqual(splittype('type:opaque:string'), ('type', 'opaque:string')) |
| 992 | |
| 993 | def test_splithost(self): |
| 994 | splithost = urllib.parse.splithost |
| 995 | self.assertEqual(splithost('//www.example.org:80/foo/bar/baz.html'), |
| 996 | ('www.example.org:80', '/foo/bar/baz.html')) |
| 997 | self.assertEqual(splithost('//www.example.org:80'), |
| 998 | ('www.example.org:80', '')) |
| 999 | self.assertEqual(splithost('/foo/bar/baz.html'), |
| 1000 | (None, '/foo/bar/baz.html')) |
| 1001 | |
postmasters | 90e01e5 | 2017-06-20 06:02:44 -0700 | [diff] [blame] | 1002 | # bpo-30500: the '#' character starts a fragment.
| 1003 | self.assertEqual(splithost('//127.0.0.1#@host.com'), |
| 1004 | ('127.0.0.1', '/#@host.com')) |
| 1005 | self.assertEqual(splithost('//127.0.0.1#@host.com:80'), |
| 1006 | ('127.0.0.1', '/#@host.com:80')) |
| 1007 | self.assertEqual(splithost('//127.0.0.1:80#@host.com'), |
| 1008 | ('127.0.0.1:80', '/#@host.com')) |
| 1009 | |
| 1010 | # An empty host is returned as an empty string.
| 1011 | self.assertEqual(splithost("///file"), |
| 1012 | ('', '/file')) |
| 1013 | |
| 1014 | # Trailing semicolon, question mark and hash symbol are kept. |
| 1015 | self.assertEqual(splithost("//example.net/file;"), |
| 1016 | ('example.net', '/file;')) |
| 1017 | self.assertEqual(splithost("//example.net/file?"), |
| 1018 | ('example.net', '/file?')) |
| 1019 | self.assertEqual(splithost("//example.net/file#"), |
| 1020 | ('example.net', '/file#')) |
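        # Illustrative sketch (not part of the original checks): urlsplit()
        # likewise treats everything after '#' as the fragment here.
        self.assertEqual(
            urllib.parse.urlsplit('//127.0.0.1#@host.com').fragment,
            '@host.com')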
| 1021 | |
Serhiy Storchaka | 9270be7 | 2015-03-02 16:32:29 +0200 | [diff] [blame] | 1022 | def test_splituser(self): |
| 1023 | splituser = urllib.parse.splituser |
| 1024 | self.assertEqual(splituser('User:Pass@www.python.org:080'), |
| 1025 | ('User:Pass', 'www.python.org:080')) |
| 1026 | self.assertEqual(splituser('@www.python.org:080'), |
| 1027 | ('', 'www.python.org:080')) |
| 1028 | self.assertEqual(splituser('www.python.org:080'), |
| 1029 | (None, 'www.python.org:080')) |
| 1030 | self.assertEqual(splituser('User:Pass@'), |
| 1031 | ('User:Pass', '')) |
| 1032 | self.assertEqual(splituser('User@example.com:Pass@www.python.org:080'), |
| 1033 | ('User@example.com:Pass', 'www.python.org:080')) |
| 1034 | |
| 1035 | def test_splitpasswd(self): |
| 1036 | # Some of the password examples are not sensible, but they are included
| 1037 | # to conform to RFC 2617 and to address issue 4675.
| 1038 | splitpasswd = urllib.parse.splitpasswd |
| 1039 | self.assertEqual(splitpasswd('user:ab'), ('user', 'ab')) |
| 1040 | self.assertEqual(splitpasswd('user:a\nb'), ('user', 'a\nb')) |
| 1041 | self.assertEqual(splitpasswd('user:a\tb'), ('user', 'a\tb')) |
| 1042 | self.assertEqual(splitpasswd('user:a\rb'), ('user', 'a\rb')) |
| 1043 | self.assertEqual(splitpasswd('user:a\fb'), ('user', 'a\fb')) |
| 1044 | self.assertEqual(splitpasswd('user:a\vb'), ('user', 'a\vb')) |
| 1045 | self.assertEqual(splitpasswd('user:a:b'), ('user', 'a:b')) |
| 1046 | self.assertEqual(splitpasswd('user:a b'), ('user', 'a b')) |
| 1047 | self.assertEqual(splitpasswd('user 2:ab'), ('user 2', 'ab')) |
| 1048 | self.assertEqual(splitpasswd('user+1:a+b'), ('user+1', 'a+b')) |
| 1049 | self.assertEqual(splitpasswd('user:'), ('user', '')) |
| 1050 | self.assertEqual(splitpasswd('user'), ('user', None)) |
| 1051 | self.assertEqual(splitpasswd(':ab'), ('', 'ab')) |
| 1052 | |
| 1053 | def test_splitport(self): |
| 1054 | splitport = urllib.parse.splitport |
| 1055 | self.assertEqual(splitport('parrot:88'), ('parrot', '88')) |
| 1056 | self.assertEqual(splitport('parrot'), ('parrot', None)) |
| 1057 | self.assertEqual(splitport('parrot:'), ('parrot', None)) |
| 1058 | self.assertEqual(splitport('127.0.0.1'), ('127.0.0.1', None)) |
| 1059 | self.assertEqual(splitport('parrot:cheese'), ('parrot:cheese', None)) |
| 1060 | self.assertEqual(splitport('[::1]:88'), ('[::1]', '88')) |
| 1061 | self.assertEqual(splitport('[::1]'), ('[::1]', None)) |
| 1062 | self.assertEqual(splitport(':88'), ('', '88')) |
| 1063 | |
| 1064 | def test_splitnport(self): |
| 1065 | splitnport = urllib.parse.splitnport |
| 1066 | self.assertEqual(splitnport('parrot:88'), ('parrot', 88)) |
| 1067 | self.assertEqual(splitnport('parrot'), ('parrot', -1)) |
| 1068 | self.assertEqual(splitnport('parrot', 55), ('parrot', 55)) |
| 1069 | self.assertEqual(splitnport('parrot:'), ('parrot', -1)) |
| 1070 | self.assertEqual(splitnport('parrot:', 55), ('parrot', 55)) |
| 1071 | self.assertEqual(splitnport('127.0.0.1'), ('127.0.0.1', -1)) |
| 1072 | self.assertEqual(splitnport('127.0.0.1', 55), ('127.0.0.1', 55)) |
| 1073 | self.assertEqual(splitnport('parrot:cheese'), ('parrot', None)) |
| 1074 | self.assertEqual(splitnport('parrot:cheese', 55), ('parrot', None)) |
| 1075 | |
| 1076 | def test_splitquery(self): |
| 1077 | # Normal cases are exercised by other tests; ensure that we also
| 1078 | # catch cases with no query specified (testcase ensuring coverage)
| 1079 | splitquery = urllib.parse.splitquery |
| 1080 | self.assertEqual(splitquery('http://python.org/fake?foo=bar'), |
| 1081 | ('http://python.org/fake', 'foo=bar')) |
| 1082 | self.assertEqual(splitquery('http://python.org/fake?foo=bar?'), |
| 1083 | ('http://python.org/fake?foo=bar', '')) |
| 1084 | self.assertEqual(splitquery('http://python.org/fake'), |
| 1085 | ('http://python.org/fake', None)) |
| 1086 | self.assertEqual(splitquery('?foo=bar'), ('', 'foo=bar')) |
| 1087 | |
| 1088 | def test_splittag(self): |
| 1089 | splittag = urllib.parse.splittag |
| 1090 | self.assertEqual(splittag('http://example.com?foo=bar#baz'), |
| 1091 | ('http://example.com?foo=bar', 'baz')) |
| 1092 | self.assertEqual(splittag('http://example.com?foo=bar#'), |
| 1093 | ('http://example.com?foo=bar', '')) |
| 1094 | self.assertEqual(splittag('#baz'), ('', 'baz')) |
| 1095 | self.assertEqual(splittag('http://example.com?foo=bar'), |
| 1096 | ('http://example.com?foo=bar', None)) |
| 1097 | self.assertEqual(splittag('http://example.com?foo=bar#baz#boo'), |
| 1098 | ('http://example.com?foo=bar#baz', 'boo')) |
| 1099 | |
| 1100 | def test_splitattr(self): |
| 1101 | splitattr = urllib.parse.splitattr |
| 1102 | self.assertEqual(splitattr('/path;attr1=value1;attr2=value2'), |
| 1103 | ('/path', ['attr1=value1', 'attr2=value2'])) |
| 1104 | self.assertEqual(splitattr('/path;'), ('/path', [''])) |
| 1105 | self.assertEqual(splitattr(';attr1=value1;attr2=value2'), |
| 1106 | ('', ['attr1=value1', 'attr2=value2'])) |
| 1107 | self.assertEqual(splitattr('/path'), ('/path', [])) |
| 1108 | |
| 1109 | def test_splitvalue(self): |
| 1110 | # Normal cases are exercised by other tests; test pathological cases
| 1111 | # with a missing key, value or '=' separator (testcase ensuring coverage)
| 1112 | splitvalue = urllib.parse.splitvalue |
| 1113 | self.assertEqual(splitvalue('foo=bar'), ('foo', 'bar')) |
| 1114 | self.assertEqual(splitvalue('foo='), ('foo', '')) |
| 1115 | self.assertEqual(splitvalue('=bar'), ('', 'bar')) |
| 1116 | self.assertEqual(splitvalue('foobar'), ('foobar', None)) |
| 1117 | self.assertEqual(splitvalue('foo=bar=baz'), ('foo', 'bar=baz')) |
| 1118 | |
| 1119 | def test_to_bytes(self): |
| 1120 | result = urllib.parse.to_bytes('http://www.python.org') |
| 1121 | self.assertEqual(result, 'http://www.python.org') |
| 1122 | self.assertRaises(UnicodeError, urllib.parse.to_bytes, |
| 1123 | 'http://www.python.org/medi\u00e6val') |
| 1124 | |
| 1125 | def test_unwrap(self): |
| 1126 | url = urllib.parse.unwrap('<URL:type://host/path>') |
| 1127 | self.assertEqual(url, 'type://host/path') |
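        # Illustrative sketch (not part of the original checks): a bare
        # 'URL:' prefix without angle brackets is stripped as well.
        url = urllib.parse.unwrap('URL:type://host/path')
        self.assertEqual(url, 'type://host/path')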
| 1128 | |
Skip Montanaro | 6ec967d | 2002-03-23 05:32:10 +0000 | [diff] [blame] | 1129 | |
| 1130 | if __name__ == "__main__": |
Serhiy Storchaka | 9270be7 | 2015-03-02 16:32:29 +0200 | [diff] [blame] | 1131 | unittest.main() |