import io
import os
import threading
import unittest
import urllib.robotparser
import urllib.request  # used directly for urlcleanup() in the server-based test below
from test import support
from test.support import socket_helper
from http.server import BaseHTTPRequestHandler, HTTPServer


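# Each test case below supplies a robots.txt body plus the URLs that the
# parser is expected to allow ("good") or reject ("bad") for a given agent;
# BaseRobotTest turns those class attributes into parametrized subTests.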
class BaseRobotTest:
    robots_txt = ''
    agent = 'test_robotparser'
    good = []
    bad = []
    site_maps = None

    def setUp(self):
        lines = io.StringIO(self.robots_txt).readlines()
        self.parser = urllib.robotparser.RobotFileParser()
        self.parser.parse(lines)

    def get_agent_and_url(self, url):
        if isinstance(url, tuple):
            agent, url = url
            return agent, url
        return self.agent, url

    def test_good_urls(self):
        for url in self.good:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertTrue(self.parser.can_fetch(agent, url))

    def test_bad_urls(self):
        for url in self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertFalse(self.parser.can_fetch(agent, url))

    def test_site_maps(self):
        self.assertEqual(self.parser.site_maps(), self.site_maps)


class UserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']


class CrawlDelayAndCustomAgentTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# robots.txt for http://www.example.com/

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:
"""
    good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
    bad = ['/cyberworld/map/index.html']


class SitemapTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# robots.txt for http://www.example.com/

User-agent: *
Sitemap: http://www.gstatic.com/s2/sitemaps/profiles-sitemap.xml
Sitemap: http://www.google.com/hostednews/sitemap_index.xml
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

"""
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html']
    site_maps = ['http://www.gstatic.com/s2/sitemaps/profiles-sitemap.xml',
                 'http://www.google.com/hostednews/sitemap_index.xml']


class RejectAllRobotsTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# go away
User-agent: *
Disallow: /
"""
    good = []
    bad = ['/cyberworld/map/index.html', '/', '/tmp/']


class BaseRequestRateTest(BaseRobotTest):
    request_rate = None
    crawl_delay = None

    def test_request_rate(self):
        parser = self.parser
        for url in self.good + self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertEqual(parser.crawl_delay(agent), self.crawl_delay)

                parsed_request_rate = parser.request_rate(agent)
                self.assertEqual(parsed_request_rate, self.request_rate)
                if self.request_rate is not None:
                    self.assertIsInstance(
                        parsed_request_rate,
                        urllib.robotparser.RequestRate
                    )
                    self.assertEqual(
                        parsed_request_rate.requests,
                        self.request_rate.requests
                    )
                    self.assertEqual(
                        parsed_request_rate.seconds,
                        self.request_rate.seconds
                    )


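# An empty robots.txt places no restrictions: every URL is fetchable and
# neither a crawl delay nor a request rate applies (both report None).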
class EmptyFileTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = ''
    good = ['/foo']


class CrawlDelayAndRequestRateTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = """\
User-agent: figtree
Crawl-delay: 3
Request-rate: 9/30
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""
    agent = 'figtree'
    request_rate = urllib.robotparser.RequestRate(9, 30)
    crawl_delay = 3
    good = [('figtree', '/foo.html')]
    bad = ['/tmp', '/tmp.html', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html',
           '/a%2fb.html', '/~joe/index.html']


class DifferentAgentTest(CrawlDelayAndRequestRateTest):
    agent = 'FigTree Robot libwww-perl/5.04'


class InvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
Crawl-delay: 3
Request-rate: 9/banana
"""
    good = ['/tmp']
    bad = ['/tmp/', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
           '/%7Ejoe/index.html']
    crawl_delay = 3


class InvalidCrawlDelayTest(BaseRobotTest, unittest.TestCase):
    # From bug report #523041
    robots_txt = """\
User-Agent: *
Disallow: /.
Crawl-delay: pears
"""
    good = ['/foo.html']
    # bug report says "/" should be denied, but that is not in the RFC
    bad = []


class AnotherInvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    # also test that Allow and Disallow work well with each other
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
Request-rate: whale/banana
"""
    agent = 'Googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class UserAgentOrderingTest(BaseRobotTest, unittest.TestCase):
    # The order of User-agent entries matters.  Note that this file is
    # incorrect because "Googlebot" is a substring of "Googlebot-Mobile".
    robots_txt = """\
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
"""
    agent = 'Googlebot'
    bad = ['/something.jpg']


class UserAgentGoogleMobileTest(UserAgentOrderingTest):
    agent = 'Googlebot-Mobile'


class GoogleURLOrderingTest(BaseRobotTest, unittest.TestCase):
    # Google also got the order wrong.  You need to specify the URLs from
    # more specific to more general.
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""
    agent = 'googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class DisallowQueryStringTest(BaseRobotTest, unittest.TestCase):
    # see issue #6325 for details
    robots_txt = """\
User-agent: *
Disallow: /some/path?name=value
"""
    good = ['/some/path']
    bad = ['/some/path?name=value']


class UseFirstUserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    # obey first * entry (#4108)
    robots_txt = """\
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
"""
    good = ['/another/path']
    bad = ['/some/path']


class EmptyQueryStringTest(BaseRobotTest, unittest.TestCase):
    # normalize the URL first (#17403)
    robots_txt = """\
User-agent: *
Allow: /some/path?
Disallow: /another/path?
"""
    good = ['/some/path?']
    bad = ['/another/path?']


class DefaultEntryTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/
"""
    request_rate = urllib.robotparser.RequestRate(3, 15)
    crawl_delay = 1
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html']


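# str() on a RobotFileParser should reproduce the parsed rules; per the
# expected output below, the default (*) entry is rendered after the named
# cybermapper entry, regardless of its position in the input.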
class StringFormattingTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow: /some/path
"""

    expected_output = """\
User-agent: cybermapper
Disallow: /some/path

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/\
"""

    def test_string_formatting(self):
        self.assertEqual(str(self.parser), self.expected_output)


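# The handler below answers every GET with 403.  RobotFileParser.read()
# treats a 401/403 response for robots.txt as "all access disallowed", so
# can_fetch() must return False for any URL on that server.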
class RobotHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        self.send_error(403, "Forbidden access")

    def log_message(self, format, *args):
        pass


class PasswordProtectedSiteTestCase(unittest.TestCase):

    def setUp(self):
        # clear _opener global variable
        self.addCleanup(urllib.request.urlcleanup)

        self.server = HTTPServer((socket_helper.HOST, 0), RobotHandler)

        self.t = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # Short poll interval to make the test finish quickly.
            # Time between requests is short enough that we won't wake
            # up spuriously too many times.
            kwargs={'poll_interval': 0.01})
        self.t.daemon = True  # In case this function raises.
        self.t.start()

    def tearDown(self):
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    @support.reap_threads
    def testPasswordProtectedSite(self):
        addr = self.server.server_address
        url = 'http://' + socket_helper.HOST + ':' + str(addr[1])
        robots_url = url + "/robots.txt"
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertFalse(parser.can_fetch("*", robots_url))


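# The remaining tests fetch a live robots.txt from pythontest.net; they are
# skipped unless the 'network' test resource is enabled.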
class NetworkTestCase(unittest.TestCase):

    base_url = 'http://www.pythontest.net/'
    robots_txt = '{}elsewhere/robots.txt'.format(base_url)

    @classmethod
    def setUpClass(cls):
        support.requires('network')
        with support.transient_internet(cls.base_url):
            cls.parser = urllib.robotparser.RobotFileParser(cls.robots_txt)
            cls.parser.read()

    def url(self, path):
        return '{}{}{}'.format(
            self.base_url, path, '/' if not os.path.splitext(path)[1] else ''
        )

    def test_basic(self):
        self.assertFalse(self.parser.disallow_all)
        self.assertFalse(self.parser.allow_all)
        self.assertGreater(self.parser.mtime(), 0)
        self.assertFalse(self.parser.crawl_delay('*'))
        self.assertFalse(self.parser.request_rate('*'))

    def test_can_fetch(self):
        self.assertTrue(self.parser.can_fetch('*', self.url('elsewhere')))
        self.assertFalse(self.parser.can_fetch('Nutch', self.base_url))
        self.assertFalse(self.parser.can_fetch('Nutch', self.url('brian')))
        self.assertFalse(self.parser.can_fetch('Nutch', self.url('webstats')))
        self.assertFalse(self.parser.can_fetch('*', self.url('webstats')))
        self.assertTrue(self.parser.can_fetch('*', self.base_url))

    def test_read_404(self):
        parser = urllib.robotparser.RobotFileParser(self.url('i-robot.txt'))
        parser.read()
        self.assertTrue(parser.allow_all)
        self.assertFalse(parser.disallow_all)
        self.assertEqual(parser.mtime(), 0)
        self.assertIsNone(parser.crawl_delay('*'))
        self.assertIsNone(parser.request_rate('*'))


if __name__ == '__main__':
    unittest.main()