import io
import unittest
import urllib.robotparser
from collections import namedtuple
from test import support
from http.server import BaseHTTPRequestHandler, HTTPServer
try:
    import threading
except ImportError:
    threading = None


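# Base class for the parser tests below: each subclass supplies a robots_txt
# body plus lists of 'good' (fetchable) and 'bad' (disallowed) URLs, and
# inherits test_good_urls/test_bad_urls from here.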
class BaseRobotTest:
    robots_txt = ''
    agent = 'test_robotparser'
    good = []
    bad = []

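    # RobotFileParser.parse() expects the robots.txt body as a sequence of
    # lines, e.g. the result of readlines().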
    def setUp(self):
        lines = io.StringIO(self.robots_txt).readlines()
        self.parser = urllib.robotparser.RobotFileParser()
        self.parser.parse(lines)

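    # Entries in 'good' and 'bad' are either plain URLs checked against the
    # class-level agent, or (agent, url) tuples naming a specific user agent.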
    def get_agent_and_url(self, url):
        if isinstance(url, tuple):
            agent, url = url
            return agent, url
        return self.agent, url

    def test_good_urls(self):
        for url in self.good:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertTrue(self.parser.can_fetch(agent, url))

    def test_bad_urls(self):
        for url in self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertFalse(self.parser.can_fetch(agent, url))


class UserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
    """
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']


class CrawlDelayAndCustomAgentTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# robots.txt for http://www.example.com/

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:
    """
    good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
    bad = ['/cyberworld/map/index.html']


class RejectAllRobotsTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# go away
User-agent: *
Disallow: /
    """
    good = []
    bad = ['/cyberworld/map/index.html', '/', '/tmp/']


class CrawlDelayAndRequestRateTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: figtree
Crawl-delay: 3
Request-rate: 9/30
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
    """
    agent = 'figtree'
    request_rate = namedtuple('req_rate', 'requests seconds')(9, 30)
    crawl_delay = 3
    good = [('figtree', '/foo.html')]
    bad = ['/tmp', '/tmp.html', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html',
           '/a%2fb.html', '/~joe/index.html']

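    # request_rate() returns a named tuple with 'requests' and 'seconds'
    # fields parsed from "Request-rate: 9/30"; crawl_delay() returns the
    # Crawl-delay value for the given agent.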
    def test_request_rate(self):
        for url in self.good:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                if self.crawl_delay:
                    self.assertEqual(
                        self.parser.crawl_delay(agent), self.crawl_delay
                    )
                if self.request_rate and self.parser.request_rate(agent):
                    self.assertEqual(
                        self.parser.request_rate(agent).requests,
                        self.request_rate.requests
                    )
                    self.assertEqual(
                        self.parser.request_rate(agent).seconds,
                        self.request_rate.seconds
                    )


class DifferentAgentTest(CrawlDelayAndRequestRateTest):
    agent = 'FigTree Robot libwww-perl/5.04'
    # request_rate and crawl_delay are not asserted for this agent; they are
    # set to None so the inherited test_request_rate skips those checks.
    request_rate = None
    crawl_delay = None


class InvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
Crawl-delay: 3
Request-rate: 9/banana
    """
    good = ['/tmp']
    bad = ['/tmp/', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
           '/%7Ejoe/index.html']
    crawl_delay = 3


class InvalidCrawlDelayTest(BaseRobotTest, unittest.TestCase):
    # From bug report #523041
    robots_txt = """\
User-Agent: *
Disallow: /.
Crawl-delay: pears
    """
    good = ['/foo.html']
    # bug report says "/" should be denied, but that is not in the RFC
    bad = []


class AnotherInvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    # also test that Allow and Disallow work well with each other
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
Request-rate: whale/banana
    """
    agent = 'Googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class UserAgentOrderingTest(BaseRobotTest, unittest.TestCase):
    # the order of User-agent entries should be respected. Note that this
    # file is ill-formed because "Googlebot" is a substring of
    # "Googlebot-Mobile"
    robots_txt = """\
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
    """
    agent = 'Googlebot'
    bad = ['/something.jpg']


class UserAgentGoogleMobileTest(UserAgentOrderingTest):
    agent = 'Googlebot-Mobile'


class GoogleURLOrderingTest(BaseRobotTest, unittest.TestCase):
    # Google also got the order wrong: you need to specify the URLs from
    # more specific to more general
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
    """
    agent = 'googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class DisallowQueryStringTest(BaseRobotTest, unittest.TestCase):
    # see issue #6325 for details
    robots_txt = """\
User-agent: *
Disallow: /some/path?name=value
    """
    good = ['/some/path']
    bad = ['/some/path?name=value']


class UseFirstUserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    # obey first * entry (#4108)
    robots_txt = """\
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
    """
    good = ['/another/path']
    bad = ['/some/path']


class EmptyQueryStringTest(BaseRobotTest, unittest.TestCase):
    # normalize the URL first (#17403)
    robots_txt = """\
User-agent: *
Allow: /some/path?
Disallow: /another/path?
    """
    good = ['/some/path?']
    bad = ['/another/path?']


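# Answers every GET with 403 so RobotFileParser.read() cannot retrieve
# robots.txt; the parser treats 401/403 responses as "disallow all".
# log_message() is overridden to keep the test run quiet.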
class RobotHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        self.send_error(403, "Forbidden access")

    def log_message(self, format, *args):
        pass


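# Serves robots.txt from a local HTTP server running on a background thread;
# every request is rejected with 403 by RobotHandler above.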
@unittest.skipUnless(threading, 'threading required for this test')
class PasswordProtectedSiteTestCase(unittest.TestCase):

    def setUp(self):
        self.server = HTTPServer((support.HOST, 0), RobotHandler)

        self.t = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # Short poll interval to make the test finish quickly.
            # Time between requests is short enough that we won't wake
            # up spuriously too many times.
            kwargs={'poll_interval': 0.01})
        self.t.daemon = True  # In case this function raises.
        self.t.start()

    def tearDown(self):
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    @support.reap_threads
    def testPasswordProtectedSite(self):
        addr = self.server.server_address
        url = 'http://' + support.HOST + ':' + str(addr[1])
        robots_url = url + "/robots.txt"
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertFalse(parser.can_fetch("*", robots_url))


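# Live-network check against the real python.org robots.txt; it only runs
# when the 'network' test resource is enabled.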
class NetworkTestCase(unittest.TestCase):

    def testPythonOrg(self):
        support.requires('network')
        with support.transient_internet('www.python.org'):
            parser = urllib.robotparser.RobotFileParser(
                "http://www.python.org/robots.txt")
            parser.read()
            self.assertTrue(
                parser.can_fetch("*", "http://www.python.org/robots.txt"))

if __name__ == '__main__':
    unittest.main()