import io
import os
import threading
import unittest
import urllib.robotparser
from test import support
from http.server import BaseHTTPRequestHandler, HTTPServer
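
# The classes below exercise urllib.robotparser.RobotFileParser against
# hand-written robots.txt fixtures. For orientation, a minimal usage sketch
# (the example.com URLs are placeholders, not used by these tests):
#
#   parser = urllib.robotparser.RobotFileParser()
#   parser.set_url('http://www.example.com/robots.txt')
#   parser.read()
#   parser.can_fetch('MyCrawler', 'http://www.example.com/some/page')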


class BaseRobotTest:
    # Subclasses provide `robots_txt` plus `good`/`bad` URL lists; the
    # shared test methods check each URL against the parsed rules.
    robots_txt = ''
    agent = 'test_robotparser'
    good = []
    bad = []
    site_maps = None

    def setUp(self):
        lines = io.StringIO(self.robots_txt).readlines()
        self.parser = urllib.robotparser.RobotFileParser()
        self.parser.parse(lines)

    def get_agent_and_url(self, url):
        # Entries in `good`/`bad` may be plain URLs or (agent, url) tuples.
        if isinstance(url, tuple):
            agent, url = url
            return agent, url
        return self.agent, url

    def test_good_urls(self):
        for url in self.good:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertTrue(self.parser.can_fetch(agent, url))

    def test_bad_urls(self):
        for url in self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertFalse(self.parser.can_fetch(agent, url))

    def test_site_maps(self):
        self.assertEqual(self.parser.site_maps(), self.site_maps)


class UserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
    """
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']


class CrawlDelayAndCustomAgentTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# robots.txt for http://www.example.com/

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:
    """
    good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
    bad = ['/cyberworld/map/index.html']


class SitemapTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# robots.txt for http://www.example.com/

User-agent: *
Sitemap: http://www.gstatic.com/s2/sitemaps/profiles-sitemap.xml
Sitemap: http://www.google.com/hostednews/sitemap_index.xml
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

    """
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html']
    site_maps = ['http://www.gstatic.com/s2/sitemaps/profiles-sitemap.xml',
                 'http://www.google.com/hostednews/sitemap_index.xml']


class RejectAllRobotsTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# go away
User-agent: *
Disallow: /
    """
    good = []
    bad = ['/cyberworld/map/index.html', '/', '/tmp/']

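
# Crawl-delay and Request-rate are non-standard robots.txt extensions that
# urllib.robotparser understands. A "Request-rate: 3/15" line means 3
# requests per 15 seconds and parses to a named tuple; illustratively:
#
#   rate = urllib.robotparser.RequestRate(requests=3, seconds=15)
#   rate.requests, rate.seconds  # -> (3, 15)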
class BaseRequestRateTest(BaseRobotTest):
    request_rate = None
    crawl_delay = None

    def test_request_rate(self):
        parser = self.parser
        for url in self.good + self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertEqual(parser.crawl_delay(agent), self.crawl_delay)

                parsed_request_rate = parser.request_rate(agent)
                self.assertEqual(parsed_request_rate, self.request_rate)
                if self.request_rate is not None:
                    self.assertIsInstance(
                        parsed_request_rate,
                        urllib.robotparser.RequestRate
                    )
                    self.assertEqual(
                        parsed_request_rate.requests,
                        self.request_rate.requests
                    )
                    self.assertEqual(
                        parsed_request_rate.seconds,
                        self.request_rate.seconds
                    )


class EmptyFileTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = ''
    good = ['/foo']


class CrawlDelayAndRequestRateTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = """\
User-agent: figtree
Crawl-delay: 3
Request-rate: 9/30
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
    """
    agent = 'figtree'
    request_rate = urllib.robotparser.RequestRate(9, 30)
    crawl_delay = 3
    good = [('figtree', '/foo.html')]
    bad = ['/tmp', '/tmp.html', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html',
           '/a%2fb.html', '/~joe/index.html']


class DifferentAgentTest(CrawlDelayAndRequestRateTest):
    agent = 'FigTree Robot libwww-perl/5.04'


class InvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
Crawl-delay: 3
Request-rate: 9/banana
    """
    good = ['/tmp']
    bad = ['/tmp/', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
           '/%7Ejoe/index.html']
    crawl_delay = 3


class InvalidCrawlDelayTest(BaseRobotTest, unittest.TestCase):
    # From bug report #523041
    robots_txt = """\
User-Agent: *
Disallow: /.
Crawl-delay: pears
    """
    good = ['/foo.html']
    # bug report says "/" should be denied, but that is not in the RFC
    bad = []


class AnotherInvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    # also test that Allow and Disallow work well with each other
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
Request-rate: whale/banana
    """
    agent = 'Googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class UserAgentOrderingTest(BaseRobotTest, unittest.TestCase):
    # the order of User-agent should be correct. note
    # that this file is incorrect because "Googlebot" is a
    # substring of "Googlebot-Mobile"
    robots_txt = """\
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
    """
    agent = 'Googlebot'
    bad = ['/something.jpg']


class UserAgentGoogleMobileTest(UserAgentOrderingTest):
    agent = 'Googlebot-Mobile'


class GoogleURLOrderingTest(BaseRobotTest, unittest.TestCase):
    # Google also got the order wrong. You need
    # to specify the URLs from more specific to more general
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
    """
    agent = 'googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']


class DisallowQueryStringTest(BaseRobotTest, unittest.TestCase):
    # see issue #6325 for details
    robots_txt = """\
User-agent: *
Disallow: /some/path?name=value
    """
    good = ['/some/path']
    bad = ['/some/path?name=value']


class UseFirstUserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    # obey first * entry (#4108)
    robots_txt = """\
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
    """
    good = ['/another/path']
    bad = ['/some/path']


class EmptyQueryStringTest(BaseRobotTest, unittest.TestCase):
    # normalize the URL first (#17403)
    robots_txt = """\
User-agent: *
Allow: /some/path?
Disallow: /another/path?
    """
    good = ['/some/path?']
    bad = ['/another/path?']


class DefaultEntryTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/
    """
    request_rate = urllib.robotparser.RequestRate(3, 15)
    crawl_delay = 1
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html']


class StringFormattingTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow: /some/path
    """

    expected_output = """\
User-agent: cybermapper
Disallow: /some/path

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/\
"""

    def test_string_formatting(self):
        self.assertEqual(str(self.parser), self.expected_output)

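
# RobotFileParser.read() treats an HTTP 401/403 response for robots.txt as
# "deny everything" (disallow_all becomes True), so a parser pointed at the
# always-403 server below should refuse every fetch.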
class RobotHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        self.send_error(403, "Forbidden access")

    def log_message(self, format, *args):
        pass


class PasswordProtectedSiteTestCase(unittest.TestCase):

    def setUp(self):
        # Bind to port 0 so the OS picks a free port.
        self.server = HTTPServer((support.HOST, 0), RobotHandler)

        self.t = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # Short poll interval to make the test finish quickly.
            # Time between requests is short enough that we won't wake
            # up spuriously too many times.
            kwargs={'poll_interval': 0.01})
        self.t.daemon = True  # In case this function raises.
        self.t.start()

    def tearDown(self):
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    @support.reap_threads
    def testPasswordProtectedSite(self):
        addr = self.server.server_address
        url = 'http://' + support.HOST + ':' + str(addr[1])
        robots_url = url + "/robots.txt"
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertFalse(parser.can_fetch("*", robots_url))

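
# The following tests need the 'network' test resource and fetch a live
# robots.txt from www.pythontest.net.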
| 340 | class NetworkTestCase(unittest.TestCase): |
Thomas Wouters | 47b49bf | 2007-08-30 22:15:33 +0000 | [diff] [blame] | 341 | |
Berker Peksag | 2a8d7f1 | 2016-09-18 11:21:57 +0300 | [diff] [blame] | 342 | base_url = 'http://www.pythontest.net/' |
| 343 | robots_txt = '{}elsewhere/robots.txt'.format(base_url) |
| 344 | |
| 345 | @classmethod |
| 346 | def setUpClass(cls): |
Florent Xicluna | 41fe615 | 2010-04-02 18:52:12 +0000 | [diff] [blame] | 347 | support.requires('network') |
Berker Peksag | 2a8d7f1 | 2016-09-18 11:21:57 +0300 | [diff] [blame] | 348 | with support.transient_internet(cls.base_url): |
| 349 | cls.parser = urllib.robotparser.RobotFileParser(cls.robots_txt) |
| 350 | cls.parser.read() |
| 351 | |
| 352 | def url(self, path): |
| 353 | return '{}{}{}'.format( |
| 354 | self.base_url, path, '/' if not os.path.splitext(path)[1] else '' |
| 355 | ) |
| 356 | |
| 357 | def test_basic(self): |
| 358 | self.assertFalse(self.parser.disallow_all) |
| 359 | self.assertFalse(self.parser.allow_all) |
| 360 | self.assertGreater(self.parser.mtime(), 0) |
| 361 | self.assertFalse(self.parser.crawl_delay('*')) |
| 362 | self.assertFalse(self.parser.request_rate('*')) |
| 363 | |
| 364 | def test_can_fetch(self): |
| 365 | self.assertTrue(self.parser.can_fetch('*', self.url('elsewhere'))) |
| 366 | self.assertFalse(self.parser.can_fetch('Nutch', self.base_url)) |
| 367 | self.assertFalse(self.parser.can_fetch('Nutch', self.url('brian'))) |
| 368 | self.assertFalse(self.parser.can_fetch('Nutch', self.url('webstats'))) |
| 369 | self.assertFalse(self.parser.can_fetch('*', self.url('webstats'))) |
| 370 | self.assertTrue(self.parser.can_fetch('*', self.base_url)) |
| 371 | |
| 372 | def test_read_404(self): |
| 373 | parser = urllib.robotparser.RobotFileParser(self.url('i-robot.txt')) |
| 374 | parser.read() |
| 375 | self.assertTrue(parser.allow_all) |
| 376 | self.assertFalse(parser.disallow_all) |
| 377 | self.assertEqual(parser.mtime(), 0) |
Berker Peksag | 9a7bbb2 | 2016-09-18 20:17:58 +0300 | [diff] [blame] | 378 | self.assertIsNone(parser.crawl_delay('*')) |
| 379 | self.assertIsNone(parser.request_rate('*')) |
Jeremy Hylton | 73fd46d | 2008-07-18 20:59:44 +0000 | [diff] [blame] | 380 | |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 381 | if __name__=='__main__': |
Ezio Melotti | 0fb37ea | 2013-03-12 07:49:12 +0200 | [diff] [blame] | 382 | unittest.main() |