import io
import unittest
import urllib.robotparser
from urllib.error import URLError
from test import support

class RobotTestCase(unittest.TestCase):
    """Check a single can_fetch() answer against a parsed robots.txt."""

    def __init__(self, index, parser, url, good, agent):
        unittest.TestCase.__init__(self)
        if good:
            self.str = "RobotTest(%d, good, %s)" % (index, url)
        else:
            self.str = "RobotTest(%d, bad, %s)" % (index, url)
        self.parser = parser
        self.url = url
        self.good = good
        self.agent = agent

    def runTest(self):
        if isinstance(self.url, tuple):
            agent, url = self.url
        else:
            url = self.url
            agent = self.agent
        if self.good:
            self.assertTrue(self.parser.can_fetch(agent, url))
        else:
            self.assertFalse(self.parser.can_fetch(agent, url))

    def __str__(self):
        return self.str

tests = unittest.TestSuite()

def RobotTest(index, robots_txt, good_urls, bad_urls,
              agent="test_robotparser"):
    """Parse robots_txt and register one test case per good/bad URL."""

    lines = io.StringIO(robots_txt).readlines()
    parser = urllib.robotparser.RobotFileParser()
    parser.parse(lines)
    for url in good_urls:
        tests.addTest(RobotTestCase(index, parser, url, 1, agent))
    for url in bad_urls:
        tests.addTest(RobotTestCase(index, parser, url, 0, agent))
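# For reference, the API these helpers exercise is typically driven like the
# minimal sketch below (the robots.txt body and paths here are made up for
# illustration and are not part of the test data):
#
#   rp = urllib.robotparser.RobotFileParser()
#   rp.parse(["User-agent: *", "Disallow: /private/"])
#   rp.can_fetch("test_robotparser", "/private/page.html")  # expected: False
#   rp.can_fetch("test_robotparser", "/public/page.html")   # expected: True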

# Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)

# 1.
doc = """
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""

good = ['/','/test.html']
bad = ['/cyberworld/map/index.html','/tmp/xxx','/foo.html']

RobotTest(1, doc, good, bad)

# 2.
doc = """
# robots.txt for http://www.example.com/

User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:

"""

good = ['/','/test.html',('cybermapper','/cyberworld/map/index.html')]
bad = ['/cyberworld/map/index.html']

RobotTest(2, doc, good, bad)

# 3.
doc = """
# go away
User-agent: *
Disallow: /
"""

good = []
bad = ['/cyberworld/map/index.html','/','/tmp/']

RobotTest(3, doc, good, bad)

# Examples from http://www.robotstxt.org/wc/norobots-rfc.html (fetched 2002)

# 4.
doc = """
User-agent: figtree
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""

good = [] # XFAIL '/a/b.html'
bad = ['/tmp','/tmp.html','/tmp/a.html',
       '/a%3cd.html','/a%3Cd.html','/a%2fb.html',
       '/~joe/index.html'
       ]

RobotTest(4, doc, good, bad, 'figtree')
RobotTest(5, doc, good, bad, 'FigTree Robot libwww-perl/5.04')

# 6.
doc = """
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
"""

good = ['/tmp',] # XFAIL: '/a%2fb.html'
bad = ['/tmp/','/tmp/a.html',
       '/a%3cd.html','/a%3Cd.html',"/a/b.html",
       '/%7Ejoe/index.html']

RobotTest(6, doc, good, bad)

# From bug report #523041

# 7.
doc = """
User-Agent: *
Disallow: /.
"""

good = ['/foo.html']
bad = [] # Bug report says "/" should be denied, but that is not in the RFC

RobotTest(7, doc, good, bad)

# From Google: http://www.google.com/support/webmasters/bin/answer.py?hl=en&answer=40364

# 8.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(8, doc, good, bad, agent="Googlebot")

# 9. This file is incorrect because "Googlebot" is a substring of
# "Googlebot-Mobile", so test 10 works just like test 9.
doc = """
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(9, doc, good, bad, agent="Googlebot")

good = []
bad = ['/something.jpg']

RobotTest(10, doc, good, bad, agent="Googlebot-Mobile")
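# (Why tests 9 and 10 behave identically: urllib.robotparser's user-agent
# matching appears to be substring-based and case-insensitive, so the first
# group, "User-agent: Googlebot", is also applied to an agent calling itself
# "Googlebot-Mobile", and the later Allow group is never reached.)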

# 11. Get the order correct.
doc = """
User-agent: Googlebot-Mobile
Allow: /

User-agent: Googlebot
Disallow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(11, doc, good, bad, agent="Googlebot")

good = ['/something.jpg']
bad = []

RobotTest(12, doc, good, bad, agent="Googlebot-Mobile")


# 13. Google also got the order wrong in #8. You need to specify the
# URLs from more specific to more general.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(13, doc, good, bad, agent="googlebot")


# 14. For issue #6325 (query string support)
doc = """
User-agent: *
Disallow: /some/path?name=value
"""

good = ['/some/path']
bad = ['/some/path?name=value']

RobotTest(14, doc, good, bad)
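# (Note on test 14: with the issue #6325 change, can_fetch() appears to match
# rules against the path together with its query string, so
# '/some/path?name=value' is rejected while the bare '/some/path' is still
# allowed, as the lists above expect.)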

# 15. For issue #4108 (obey first * entry)
doc = """
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
"""

good = ['/another/path']
bad = ['/some/path']

RobotTest(15, doc, good, bad)


class NetworkTestCase(unittest.TestCase):

    def testPasswordProtectedSite(self):
        support.requires('network')
        with support.transient_internet('mueblesmoraleda.com'):
            url = 'http://mueblesmoraleda.com'
            parser = urllib.robotparser.RobotFileParser()
            parser.set_url(url)
            try:
                parser.read()
            except URLError:
                self.skipTest('%s is unavailable' % url)
            self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)

    def testPythonOrg(self):
        support.requires('network')
        with support.transient_internet('www.python.org'):
            parser = urllib.robotparser.RobotFileParser(
                "http://www.python.org/robots.txt")
            parser.read()
            self.assertTrue(
                parser.can_fetch("*", "http://www.python.org/robots.txt"))

def test_main():
    support.run_unittest(NetworkTestCase)
    support.run_unittest(tests)

if __name__ == '__main__':
    support.verbose = 1
    test_main()