Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 1 | import os |
| 2 | import robotparser |
| 3 | import unittest |
| 4 | from test import support |
| 5 | from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer |
| 6 | import StringIO |
Ned Deily | c727533 | 2014-03-26 23:25:02 -0700 | [diff] [blame] | 7 | try: |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 8 | import threading |
Ned Deily | c727533 | 2014-03-26 23:25:02 -0700 | [diff] [blame] | 9 | except ImportError: |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 10 | threading = None |
Ned Deily | c727533 | 2014-03-26 23:25:02 -0700 | [diff] [blame] | 11 | |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 12 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 13 | class BaseRobotTest: |
| 14 | robots_txt = '' |
| 15 | agent = 'test_robotparser' |
| 16 | good = [] |
| 17 | bad = [] |
| 18 | |
| 19 | def setUp(self): |
| 20 | lines = StringIO.StringIO(self.robots_txt).readlines() |
| 21 | self.parser = robotparser.RobotFileParser() |
| 22 | self.parser.parse(lines) |
| 23 | |
| 24 | def get_agent_and_url(self, url): |
| 25 | if isinstance(url, tuple): |
| 26 | agent, url = url |
| 27 | return agent, url |
| 28 | return self.agent, url |
| 29 | |
| 30 | def test_good_urls(self): |
| 31 | for url in self.good: |
| 32 | agent, url = self.get_agent_and_url(url) |
Benjamin Peterson | 5c8da86 | 2009-06-30 22:57:08 +0000 | [diff] [blame] | 33 | self.assertTrue(self.parser.can_fetch(agent, url)) |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 34 | |
| 35 | def test_bad_urls(self): |
| 36 | for url in self.bad: |
| 37 | agent, url = self.get_agent_and_url(url) |
Benjamin Peterson | 5c8da86 | 2009-06-30 22:57:08 +0000 | [diff] [blame] | 38 | self.assertFalse(self.parser.can_fetch(agent, url)) |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 39 | |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 40 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 41 | class UserAgentWildcardTest(BaseRobotTest, unittest.TestCase): |
| 42 | robots_txt = """\ |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 43 | User-agent: * |
| 44 | Disallow: /cyberworld/map/ # This is an infinite virtual URL space |
| 45 | Disallow: /tmp/ # these will soon disappear |
| 46 | Disallow: /foo.html |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 47 | """ |
| 48 | good = ['/', '/test.html'] |
| 49 | bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html'] |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 50 | |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 51 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 52 | class RejectAllRobotsTest(BaseRobotTest, unittest.TestCase): |
| 53 | robots_txt = """\ |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 54 | # go away |
| 55 | User-agent: * |
| 56 | Disallow: / |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 57 | """ |
| 58 | good = [] |
| 59 | bad = ['/cyberworld/map/index.html', '/', '/tmp/'] |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 60 | |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 61 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 62 | class UserAgentOrderingTest(BaseRobotTest, unittest.TestCase): |
| 63 | # the order of User-agent should be correct. note |
| 64 | # that this file is incorrect because "Googlebot" is a |
| 65 | # substring of "Googlebot-Mobile" |
| 66 | robots_txt = """\ |
Skip Montanaro | 1ef19f0 | 2008-07-27 00:49:02 +0000 | [diff] [blame] | 67 | User-agent: Googlebot |
| 68 | Disallow: / |
| 69 | |
| 70 | User-agent: Googlebot-Mobile |
| 71 | Allow: / |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 72 | """ |
| 73 | agent = 'Googlebot' |
| 74 | bad = ['/something.jpg'] |
Skip Montanaro | 1ef19f0 | 2008-07-27 00:49:02 +0000 | [diff] [blame] | 75 | |
| 76 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 77 | class UserAgentGoogleMobileTest(UserAgentOrderingTest): |
| 78 | agent = 'Googlebot-Mobile' |
| 79 | |
| 80 | |
class GoogleURLOrderingTest(BaseRobotTest, unittest.TestCase):
    """A specific Allow listed before a broader Disallow must win for
    the URL it names (the lowercase agent also exercises
    case-insensitive agent matching — presumably; verify in parser).
    """
    # Google also got the order wrong. You need
    # to specify the URLs from more specific to more general
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""
    agent = 'googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']
Skip Montanaro | 1ef19f0 | 2008-07-27 00:49:02 +0000 | [diff] [blame] | 92 | |
| 93 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 94 | class DisallowQueryStringTest(BaseRobotTest, unittest.TestCase): |
| 95 | # see issue #6325 for details |
| 96 | robots_txt = """\ |
Senthil Kumaran | a4f79f9 | 2010-07-28 16:35:35 +0000 | [diff] [blame] | 97 | User-agent: * |
| 98 | Disallow: /some/path?name=value |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 99 | """ |
| 100 | good = ['/some/path'] |
| 101 | bad = ['/some/path?name=value'] |
Senthil Kumaran | a4f79f9 | 2010-07-28 16:35:35 +0000 | [diff] [blame] | 102 | |
Senthil Kumaran | a4f79f9 | 2010-07-28 16:35:35 +0000 | [diff] [blame] | 103 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 104 | class UseFirstUserAgentWildcardTest(BaseRobotTest, unittest.TestCase): |
| 105 | # obey first * entry (#4108) |
| 106 | robots_txt = """\ |
Georg Brandl | 2bd953e | 2010-08-01 20:59:03 +0000 | [diff] [blame] | 107 | User-agent: * |
| 108 | Disallow: /some/path |
| 109 | |
| 110 | User-agent: * |
| 111 | Disallow: /another/path |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 112 | """ |
| 113 | good = ['/another/path'] |
| 114 | bad = ['/some/path'] |
Georg Brandl | 2bd953e | 2010-08-01 20:59:03 +0000 | [diff] [blame] | 115 | |
Georg Brandl | 2bd953e | 2010-08-01 20:59:03 +0000 | [diff] [blame] | 116 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 117 | class EmptyQueryStringTest(BaseRobotTest, unittest.TestCase): |
| 118 | # normalize the URL first (#17403) |
| 119 | robots_txt = """\ |
Senthil Kumaran | 2c4810e | 2013-05-29 05:58:47 -0700 | [diff] [blame] | 120 | User-agent: * |
| 121 | Allow: /some/path? |
| 122 | Disallow: /another/path? |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 123 | """ |
| 124 | good = ['/some/path?'] |
| 125 | bad = ['/another/path?'] |
Senthil Kumaran | 2c4810e | 2013-05-29 05:58:47 -0700 | [diff] [blame] | 126 | |
Senthil Kumaran | 2c4810e | 2013-05-29 05:58:47 -0700 | [diff] [blame] | 127 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 128 | class DefaultEntryTest(BaseRobotTest, unittest.TestCase): |
| 129 | robots_txt = """\ |
| 130 | User-agent: * |
| 131 | Crawl-delay: 1 |
| 132 | Request-rate: 3/15 |
| 133 | Disallow: /cyberworld/map/ |
| 134 | """ |
| 135 | good = ['/', '/test.html'] |
| 136 | bad = ['/cyberworld/map/index.html'] |
| 137 | |
| 138 | |
class RobotHandler(BaseHTTPRequestHandler):
    """Request handler that answers every GET with a 403 error,
    emulating a password-protected site (robots.txt included).
    """

    def do_GET(self):
        self.send_error(403, "Forbidden access")

    def log_message(self, format, *args):
        # Silence the default stderr request logging to keep test
        # output clean.
        pass
| 146 | |
| 147 | |
@unittest.skipUnless(threading, 'threading required for this test')
class PasswordProtectedSiteTestCase(unittest.TestCase):
    """When fetching robots.txt itself returns 403, can_fetch() must
    deny access (a protected site is treated as fully disallowed).
    """

    def setUp(self):
        # Local HTTP server on an ephemeral port; RobotHandler 403s
        # every request, including the robots.txt fetch.
        self.server = HTTPServer((support.HOST, 0), RobotHandler)

        self.t = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # Short poll interval to make the test finish quickly.
            # Time between requests is short enough that we won't wake
            # up spuriously too many times.
            kwargs={'poll_interval':0.01})
        self.t.daemon = True  # In case this function raises.
        self.t.start()

    def tearDown(self):
        # Stop the serve_forever loop, wait for the server thread to
        # exit, then release the listening socket.
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    @support.reap_threads
    def testPasswordProtectedSite(self):
        addr = self.server.server_address
        url = 'http://' + support.HOST + ':' + str(addr[1])
        robots_url = url + "/robots.txt"
        parser = robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        # The robots.txt fetch got a 403, so nothing may be fetched.
        self.assertFalse(parser.can_fetch("*", robots_url))
Senthil Kumaran | 2c4810e | 2013-05-29 05:58:47 -0700 | [diff] [blame] | 178 | |
Skip Montanaro | 1ef19f0 | 2008-07-27 00:49:02 +0000 | [diff] [blame] | 179 | |
Florent Xicluna | f37592f | 2010-04-02 17:26:42 +0000 | [diff] [blame] | 180 | class NetworkTestCase(unittest.TestCase): |
| 181 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 182 | base_url = 'http://www.pythontest.net/' |
| 183 | robots_txt = '{}elsewhere/robots.txt'.format(base_url) |
Skip Montanaro | 1a41313 | 2007-08-28 23:22:52 +0000 | [diff] [blame] | 184 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 185 | @classmethod |
| 186 | def setUpClass(cls): |
| 187 | support.requires('network') |
| 188 | with support.transient_internet(cls.base_url): |
| 189 | cls.parser = robotparser.RobotFileParser(cls.robots_txt) |
| 190 | cls.parser.read() |
| 191 | |
| 192 | def url(self, path): |
| 193 | return '{}{}{}'.format( |
| 194 | self.base_url, path, '/' if not os.path.splitext(path)[1] else '' |
| 195 | ) |
| 196 | |
| 197 | def test_basic(self): |
| 198 | self.assertFalse(self.parser.disallow_all) |
| 199 | self.assertFalse(self.parser.allow_all) |
| 200 | self.assertGreater(self.parser.mtime(), 0) |
| 201 | |
| 202 | def test_can_fetch(self): |
| 203 | self.assertTrue(self.parser.can_fetch('*', self.url('elsewhere'))) |
| 204 | self.assertFalse(self.parser.can_fetch('Nutch', self.base_url)) |
| 205 | self.assertFalse(self.parser.can_fetch('Nutch', self.url('brian'))) |
| 206 | self.assertFalse(self.parser.can_fetch('Nutch', self.url('webstats'))) |
| 207 | self.assertFalse(self.parser.can_fetch('*', self.url('webstats'))) |
| 208 | self.assertTrue(self.parser.can_fetch('*', self.base_url)) |
| 209 | |
| 210 | def test_read_404(self): |
| 211 | parser = robotparser.RobotFileParser(self.url('i-robot.txt')) |
| 212 | parser.read() |
| 213 | self.assertTrue(parser.allow_all) |
| 214 | self.assertFalse(parser.disallow_all) |
| 215 | self.assertEqual(parser.mtime(), 0) |
Florent Xicluna | f37592f | 2010-04-02 17:26:42 +0000 | [diff] [blame] | 216 | |
| 217 | |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 218 | def test_main(): |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 219 | support.run_unittest( |
| 220 | UserAgentWildcardTest, |
| 221 | RejectAllRobotsTest, |
| 222 | UserAgentOrderingTest, |
| 223 | UserAgentGoogleMobileTest, |
| 224 | GoogleURLOrderingTest, |
| 225 | DisallowQueryStringTest, |
| 226 | UseFirstUserAgentWildcardTest, |
| 227 | EmptyQueryStringTest, |
| 228 | DefaultEntryTest, |
| 229 | PasswordProtectedSiteTestCase, |
| 230 | NetworkTestCase) |
Martin v. Löwis | 1c63f6e | 2002-02-28 15:24:47 +0000 | [diff] [blame] | 231 | |
Victor Stinner | 668489a | 2017-07-05 10:00:33 +0200 | [diff] [blame] | 232 | |
# Allow running this test file directly from the command line.
if __name__ == "__main__":
    test_main()