import io
import unittest
import urllib.robotparser
from collections import namedtuple
from test import support
from http.server import BaseHTTPRequestHandler, HTTPServer
try:
    import threading
except ImportError:
    threading = None


class RobotTestCase(unittest.TestCase):
    def __init__(self, index=None, parser=None, url=None, good=None,
                 agent=None, request_rate=None, crawl_delay=None):
        # workaround to make unittest discovery work (see #17066)
        if not isinstance(index, int):
            return
        unittest.TestCase.__init__(self)
        if good:
            self.str = "RobotTest(%d, good, %s)" % (index, url)
        else:
            self.str = "RobotTest(%d, bad, %s)" % (index, url)
        self.parser = parser
        self.url = url
        self.good = good
        self.agent = agent
        self.request_rate = request_rate
        self.crawl_delay = crawl_delay

    def runTest(self):
        if isinstance(self.url, tuple):
            agent, url = self.url
        else:
            url = self.url
            agent = self.agent
        if self.good:
            self.assertTrue(self.parser.can_fetch(agent, url))
            self.assertEqual(self.parser.crawl_delay(agent), self.crawl_delay)
            # if we have actual values for request rate
            if self.request_rate and self.parser.request_rate(agent):
                self.assertEqual(
                    self.parser.request_rate(agent).requests,
                    self.request_rate.requests
                )
                self.assertEqual(
                    self.parser.request_rate(agent).seconds,
                    self.request_rate.seconds
                )
            self.assertEqual(self.parser.request_rate(agent), self.request_rate)
        else:
            self.assertFalse(self.parser.can_fetch(agent, url))

    def __str__(self):
        return self.str

tests = unittest.TestSuite()

def RobotTest(index, robots_txt, good_urls, bad_urls,
              request_rate, crawl_delay, agent="test_robotparser"):
    lines = io.StringIO(robots_txt).readlines()
    parser = urllib.robotparser.RobotFileParser()
    parser.parse(lines)
    for url in good_urls:
        tests.addTest(RobotTestCase(index, parser, url, 1, agent,
                                    request_rate, crawl_delay))
    for url in bad_urls:
        tests.addTest(RobotTestCase(index, parser, url, 0, agent,
                                    request_rate, crawl_delay))

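# A minimal sketch of what each registered case does with the parser, using
# only the API already exercised above:
#
#     parser = urllib.robotparser.RobotFileParser()
#     parser.parse(io.StringIO("User-agent: *\nDisallow: /tmp/\n").readlines())
#     parser.can_fetch("test_robotparser", "/tmp/x")  # -> False (a "bad" URL)
#     parser.can_fetch("test_robotparser", "/index")  # -> True (a "good" URL)
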
# Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)

# 1.
doc = """
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""

good = ['/', '/test.html']
bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']
request_rate = None
crawl_delay = None

RobotTest(1, doc, good, bad, request_rate, crawl_delay)

# 2.
doc = """
# robots.txt for http://www.example.com/

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:

"""

good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
bad = ['/cyberworld/map/index.html']
request_rate = None  # expected to be None since the values above
crawl_delay = None   # do not apply to the cybermapper user agent

RobotTest(2, doc, good, bad, request_rate, crawl_delay)

# 3.
doc = """
# go away
User-agent: *
Disallow: /
"""

good = []
bad = ['/cyberworld/map/index.html', '/', '/tmp/']
request_rate = None
crawl_delay = None

RobotTest(3, doc, good, bad, request_rate, crawl_delay)

# Examples from http://www.robotstxt.org/wc/norobots-rfc.html (fetched 2002)

# 4.
doc = """
User-agent: figtree
Crawl-delay: 3
Request-rate: 9/30
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""

good = []  # XFAIL '/a/b.html'
bad = ['/tmp', '/tmp.html', '/tmp/a.html',
       '/a%3cd.html', '/a%3Cd.html', '/a%2fb.html',
       '/~joe/index.html']

# Build the expected request rate as a namedtuple instance so that both the
# field-by-field asserts and the whole-value comparison in runTest() work.
request_rate = namedtuple('req_rate', 'requests seconds')(requests=9, seconds=30)
crawl_delay = 3
request_rate_bad = None  # not actually asserted, but still passed so the
crawl_delay_bad = None   # test case accepts the same set of parameters

RobotTest(4, doc, good, bad, request_rate, crawl_delay, 'figtree')
RobotTest(5, doc, good, bad, request_rate_bad, crawl_delay_bad,
          'FigTree Robot libwww-perl/5.04')
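
# A minimal sketch of the request-rate API the asserts above exercise,
# assuming the RobotFileParser behaviour tested in this file:
#
#     rate = parser.request_rate('figtree')  # parsed from "Request-rate: 9/30"
#     rate.requests, rate.seconds            # -> (9, 30): 9 requests per 30 seconds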

# 6.
doc = """
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
Crawl-delay: 3
Request-rate: 9/banana
"""

good = ['/tmp']  # XFAIL: '/a%2fb.html'
bad = ['/tmp/', '/tmp/a.html',
       '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
       '/%7Ejoe/index.html']
crawl_delay = None   # the default entry's Crawl-delay is not reported here
request_rate = None  # the request rate has invalid syntax ("9/banana"), so None

RobotTest(6, doc, good, bad, request_rate, crawl_delay)

# From bug report #523041

# 7.
doc = """
User-Agent: *
Disallow: /.
Crawl-delay: pears
"""

good = ['/foo.html']
bad = []  # bug report says "/" should be denied, but that is not in the RFC

request_rate = None
crawl_delay = None  # the crawl delay has invalid syntax, so None is expected

RobotTest(7, doc, good, bad, request_rate, crawl_delay)

# From Google: http://www.google.com/support/webmasters/bin/answer.py?hl=en&answer=40364

# 8.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
Request-rate: whale/banana
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']
request_rate = None  # the request rate has invalid syntax, so None is expected
crawl_delay = None

RobotTest(8, doc, good, bad, request_rate, crawl_delay, agent="Googlebot")

# 9. This file is incorrect because "Googlebot" is a substring of
# "Googlebot-Mobile", so test 10 works just like test 9.
doc = """
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(9, doc, good, bad, None, None, agent="Googlebot")

good = []
bad = ['/something.jpg']

RobotTest(10, doc, good, bad, None, None, agent="Googlebot-Mobile")
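
# Why tests 9 and 10 behave alike, in a short sketch (assuming the
# substring-based agent matching used by this parser): the entry name
# "googlebot" occurs inside the requesting agent "googlebot-mobile", so the
# disallow-everything Googlebot entry captures both agents:
#
#     'googlebot' in 'googlebot-mobile'  # -> True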

# 11. Get the order correct.
doc = """
User-agent: Googlebot-Mobile
Allow: /

User-agent: Googlebot
Disallow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(11, doc, good, bad, None, None, agent="Googlebot")

good = ['/something.jpg']
bad = []

RobotTest(12, doc, good, bad, None, None, agent="Googlebot-Mobile")


# 13. Google also got the order wrong in #8. You need to specify the
# URLs from more specific to more general.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(13, doc, good, bad, None, None, agent="googlebot")


# 14. For issue #6325 (query string support)
doc = """
User-agent: *
Disallow: /some/path?name=value
"""

good = ['/some/path']
bad = ['/some/path?name=value']

RobotTest(14, doc, good, bad, None, None)

# 15. For issue #4108 (obey first * entry)
doc = """
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
"""

good = ['/another/path']
bad = ['/some/path']

RobotTest(15, doc, good, bad, None, None)
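
# Sketch of the first-entry rule this test pins down (assuming the parser
# honours only the first matching User-agent block): the second "*" entry is
# ignored, so only /some/path is disallowed:
#
#     parser.can_fetch('test_robotparser', '/some/path')     # -> False
#     parser.can_fetch('test_robotparser', '/another/path')  # -> True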

# 16. Empty query (issue #17403). The URL is normalized before matching.
doc = """
User-agent: *
Allow: /some/path?
Disallow: /another/path?
"""

good = ['/some/path?']
bad = ['/another/path?']

RobotTest(16, doc, good, bad, None, None)


class RobotHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        self.send_error(403, "Forbidden access")

    def log_message(self, format, *args):
        pass


@unittest.skipUnless(threading, 'threading required for this test')
class PasswordProtectedSiteTestCase(unittest.TestCase):

    def setUp(self):
        self.server = HTTPServer((support.HOST, 0), RobotHandler)

        self.t = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # Short poll interval to make the test finish quickly.
            # Time between requests is short enough that we won't wake
            # up spuriously too many times.
            kwargs={'poll_interval': 0.01})
        self.t.daemon = True  # In case this function raises.
        self.t.start()

    def tearDown(self):
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    def runTest(self):
        self.testPasswordProtectedSite()

    def testPasswordProtectedSite(self):
        addr = self.server.server_address
        url = 'http://' + support.HOST + ':' + str(addr[1])
        robots_url = url + "/robots.txt"
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertFalse(parser.can_fetch("*", robots_url))

    def __str__(self):
        return self.__class__.__name__

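# The case above relies on RobotFileParser.read() treating an HTTP 401/403
# answer for robots.txt as "disallow everything". A minimal sketch of that
# behaviour against the local server started in setUp():
#
#     parser.read()                    # server answers 403
#     parser.can_fetch('*', some_url)  # -> False for every URL
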
class NetworkTestCase(unittest.TestCase):

    @unittest.skip('does not handle the gzip encoding delivered by pydotorg')
    def testPythonOrg(self):
        support.requires('network')
        with support.transient_internet('www.python.org'):
            parser = urllib.robotparser.RobotFileParser(
                "http://www.python.org/robots.txt")
            parser.read()
            self.assertTrue(
                parser.can_fetch("*", "http://www.python.org/robots.txt"))

def load_tests(loader, suite, pattern):
    suite = unittest.makeSuite(NetworkTestCase)
    suite.addTest(tests)
    suite.addTest(PasswordProtectedSiteTestCase())
    return suite

if __name__ == '__main__':
    unittest.main()