import io
import os
import threading
import unittest
import urllib.robotparser
from collections import namedtuple
from test import support
from http.server import BaseHTTPRequestHandler, HTTPServer
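
# A quick sketch of the API under test (illustrative only, not executed
# by these tests):
#
#     rp = urllib.robotparser.RobotFileParser()
#     rp.parse(['User-agent: *', 'Disallow: /private/'])
#     rp.can_fetch('MyBot', 'http://example.com/private/x.html')  # -> False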


class BaseRobotTest:
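    """Shared harness for the parser tests.

    Subclasses set ``robots_txt`` plus ``good`` and ``bad`` URL lists;
    each list entry is either a path (checked with the default agent)
    or an (agent, path) tuple.
    """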
    robots_txt = ''
    agent = 'test_robotparser'
    good = []
    bad = []

    def setUp(self):
        lines = io.StringIO(self.robots_txt).readlines()
        self.parser = urllib.robotparser.RobotFileParser()
        self.parser.parse(lines)

    def get_agent_and_url(self, url):
        if isinstance(url, tuple):
            agent, url = url
            return agent, url
        return self.agent, url

    def test_good_urls(self):
        for url in self.good:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertTrue(self.parser.can_fetch(agent, url))

    def test_bad_urls(self):
        for url in self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertFalse(self.parser.can_fetch(agent, url))


class UserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']


class CrawlDelayAndCustomAgentTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# robots.txt for http://www.example.com/

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:
"""
    good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
    bad = ['/cyberworld/map/index.html']


class RejectAllRobotsTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# go away
User-agent: *
Disallow: /
"""
    good = []
    bad = ['/cyberworld/map/index.html', '/', '/tmp/']


class BaseRequestRateTest(BaseRobotTest):
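    """Extends the harness with checks of the parsed Crawl-delay and
    Request-rate values; subclasses set ``crawl_delay`` and
    ``request_rate`` (None skips the corresponding assertions).
    """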

    def test_request_rate(self):
        for url in self.good + self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                if self.crawl_delay:
                    self.assertEqual(
                        self.parser.crawl_delay(agent), self.crawl_delay
                    )
                if self.request_rate:
                    self.assertEqual(
                        self.parser.request_rate(agent).requests,
                        self.request_rate.requests
                    )
                    self.assertEqual(
                        self.parser.request_rate(agent).seconds,
                        self.request_rate.seconds
                    )


class CrawlDelayAndRequestRateTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = """\
User-agent: figtree
Crawl-delay: 3
Request-rate: 9/30
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""
    agent = 'figtree'
    request_rate = namedtuple('req_rate', 'requests seconds')(9, 30)
    crawl_delay = 3
    good = [('figtree', '/foo.html')]
    bad = ['/tmp', '/tmp.html', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html',
           '/a%2fb.html', '/~joe/index.html']


class DifferentAgentTest(CrawlDelayAndRequestRateTest):
    agent = 'FigTree Robot libwww-perl/5.04'
    # request_rate and crawl_delay are not checked for this agent; they
    # are set to None so the inherited test_request_rate skips those
    # assertions while the same robots.txt is still parsed
    request_rate = None
    crawl_delay = None


class InvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
Crawl-delay: 3
Request-rate: 9/banana
"""
    good = ['/tmp']
    bad = ['/tmp/', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
           '/%7Ejoe/index.html']
    crawl_delay = 3


class InvalidCrawlDelayTest(BaseRobotTest, unittest.TestCase):
    # From bug report #523041
    robots_txt = """\
User-Agent: *
Disallow: /.
Crawl-delay: pears
"""
    good = ['/foo.html']
    # bug report says "/" should be denied, but that is not in the RFC
    bad = []


class AnotherInvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    # also test that Allow and Disallow work well with each other
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
Request-rate: whale/banana
"""
    agent = 'Googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class UserAgentOrderingTest(BaseRobotTest, unittest.TestCase):
    # User-agent entries must be matched in the order they appear; note
    # that this file is incorrect because "Googlebot" is a substring of
    # "Googlebot-Mobile"
    robots_txt = """\
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
"""
    agent = 'Googlebot'
    bad = ['/something.jpg']


class UserAgentGoogleMobileTest(UserAgentOrderingTest):
    agent = 'Googlebot-Mobile'


class GoogleURLOrderingTest(BaseRobotTest, unittest.TestCase):
    # Google also got the order wrong: URLs need to be specified from
    # more specific to more general
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""
    agent = 'googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class DisallowQueryStringTest(BaseRobotTest, unittest.TestCase):
    # see issue #6325 for details
    robots_txt = """\
User-agent: *
Disallow: /some/path?name=value
"""
    good = ['/some/path']
    bad = ['/some/path?name=value']


class UseFirstUserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    # obey first * entry (#4108)
    robots_txt = """\
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
"""
    good = ['/another/path']
    bad = ['/some/path']


class EmptyQueryStringTest(BaseRobotTest, unittest.TestCase):
    # normalize the URL first (#17403)
    robots_txt = """\
User-agent: *
Allow: /some/path?
Disallow: /another/path?
"""
    good = ['/some/path?']
    bad = ['/another/path?']


class DefaultEntryTest(BaseRequestRateTest, unittest.TestCase):
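    # the wildcard entry should also supply the crawl-delay and
    # request-rate values for agents that have no entry of their own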
    robots_txt = """\
User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/
"""
    request_rate = namedtuple('req_rate', 'requests seconds')(3, 15)
    crawl_delay = 1
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html']


class RobotHandler(BaseHTTPRequestHandler):
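    # a handler that rejects every GET with 403, simulating a site that
    # refuses access to robots.txt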

    def do_GET(self):
        self.send_error(403, "Forbidden access")

    def log_message(self, format, *args):
        # silence the default per-request logging
        pass


class PasswordProtectedSiteTestCase(unittest.TestCase):
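    # read() on a server that answers 403 must leave the parser in its
    # "disallow all" state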

    def setUp(self):
        self.server = HTTPServer((support.HOST, 0), RobotHandler)

        self.t = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # Short poll interval to make the test finish quickly.
            # Time between requests is short enough that we won't wake
            # up spuriously too many times.
            kwargs={'poll_interval': 0.01})
        self.t.daemon = True  # In case this function raises.
        self.t.start()

    def tearDown(self):
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    @support.reap_threads
    def testPasswordProtectedSite(self):
        addr = self.server.server_address
        url = 'http://' + support.HOST + ':' + str(addr[1])
        robots_url = url + "/robots.txt"
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertFalse(parser.can_fetch("*", robots_url))


class NetworkTestCase(unittest.TestCase):
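    # live tests against www.pythontest.net; they require the 'network'
    # resource and a working internet connection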

    base_url = 'http://www.pythontest.net/'
    robots_txt = '{}elsewhere/robots.txt'.format(base_url)

    @classmethod
    def setUpClass(cls):
        support.requires('network')
        with support.transient_internet(cls.base_url):
            cls.parser = urllib.robotparser.RobotFileParser(cls.robots_txt)
            cls.parser.read()

    def url(self, path):
        return '{}{}{}'.format(
            self.base_url, path, '/' if not os.path.splitext(path)[1] else ''
        )

    def test_basic(self):
        self.assertFalse(self.parser.disallow_all)
        self.assertFalse(self.parser.allow_all)
        self.assertGreater(self.parser.mtime(), 0)
        self.assertFalse(self.parser.crawl_delay('*'))
        self.assertFalse(self.parser.request_rate('*'))

    def test_can_fetch(self):
        self.assertTrue(self.parser.can_fetch('*', self.url('elsewhere')))
        self.assertFalse(self.parser.can_fetch('Nutch', self.base_url))
        self.assertFalse(self.parser.can_fetch('Nutch', self.url('brian')))
        self.assertFalse(self.parser.can_fetch('Nutch', self.url('webstats')))
        self.assertFalse(self.parser.can_fetch('*', self.url('webstats')))
        self.assertTrue(self.parser.can_fetch('*', self.base_url))

    def test_read_404(self):
        parser = urllib.robotparser.RobotFileParser(self.url('i-robot.txt'))
        parser.read()
        self.assertTrue(parser.allow_all)
        self.assertFalse(parser.disallow_all)
        self.assertEqual(parser.mtime(), 0)
        self.assertIsNone(parser.crawl_delay('*'))
        self.assertIsNone(parser.request_rate('*'))

if __name__ == '__main__':
    unittest.main()