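"""Tests for the robotparser module.

The RobotTest() calls below build a suite of can_fetch() checks from canned
robots.txt texts; NetworkTestCase at the bottom needs the 'network' resource.
"""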
import unittest, StringIO, robotparser
from test import test_support
from urllib2 import urlopen, HTTPError

class RobotTestCase(unittest.TestCase):
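    """One can_fetch() check: `url` should be allowed iff `good` is true.

    `url` may also be an (agent, url) tuple overriding the default agent.
    """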
    def __init__(self, index, parser, url, good, agent):
        unittest.TestCase.__init__(self)
        if good:
            self.str = "RobotTest(%d, good, %s)" % (index, url)
        else:
            self.str = "RobotTest(%d, bad, %s)" % (index, url)
        self.parser = parser
        self.url = url
        self.good = good
        self.agent = agent

    def runTest(self):
        if isinstance(self.url, tuple):
            agent, url = self.url
        else:
            url = self.url
            agent = self.agent
        if self.good:
            self.assertTrue(self.parser.can_fetch(agent, url))
        else:
            self.assertFalse(self.parser.can_fetch(agent, url))

    def __str__(self):
        return self.str

tests = unittest.TestSuite()

def RobotTest(index, robots_txt, good_urls, bad_urls,
              agent="test_robotparser"):
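    """Parse robots_txt and queue one RobotTestCase per URL: good_urls are
    expected to be fetchable by `agent`, bad_urls to be disallowed."""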

    lines = StringIO.StringIO(robots_txt).readlines()
    parser = robotparser.RobotFileParser()
    parser.parse(lines)
    for url in good_urls:
        tests.addTest(RobotTestCase(index, parser, url, 1, agent))
    for url in bad_urls:
        tests.addTest(RobotTestCase(index, parser, url, 0, agent))

# Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)

# 1.
doc = """
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""

good = ['/','/test.html']
bad = ['/cyberworld/map/index.html','/tmp/xxx','/foo.html']

RobotTest(1, doc, good, bad)

# 2.
doc = """
# robots.txt for http://www.example.com/

User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:

"""

good = ['/','/test.html',('cybermapper','/cyberworld/map/index.html')]
bad = ['/cyberworld/map/index.html']

RobotTest(2, doc, good, bad)

# 3.
doc = """
# go away
User-agent: *
Disallow: /
"""

good = []
bad = ['/cyberworld/map/index.html','/','/tmp/']

RobotTest(3, doc, good, bad)

# Examples from http://www.robotstxt.org/wc/norobots-rfc.html (fetched 2002)

# 4.
doc = """
User-agent: figtree
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""

good = [] # XFAIL '/a/b.html'
bad = ['/tmp','/tmp.html','/tmp/a.html',
       '/a%3cd.html','/a%3Cd.html','/a%2fb.html',
       '/~joe/index.html'
       ]

RobotTest(4, doc, good, bad, 'figtree')
RobotTest(5, doc, good, bad, 'FigTree Robot libwww-perl/5.04')

# 6.
doc = """
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
"""

good = ['/tmp',] # XFAIL: '/a%2fb.html'
bad = ['/tmp/','/tmp/a.html',
       '/a%3cd.html','/a%3Cd.html',"/a/b.html",
       '/%7Ejoe/index.html']

RobotTest(6, doc, good, bad)

# From bug report #523041

# 7.
doc = """
User-Agent: *
Disallow: /.
"""

good = ['/foo.html']
bad = [] # Bug report says "/" should be denied, but that is not in the RFC

RobotTest(7, doc, good, bad)

# From Google: http://www.google.com/support/webmasters/bin/answer.py?hl=en&answer=40364

# 8.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(8, doc, good, bad, agent="Googlebot")

# 9. This file is incorrect because "Googlebot" is a substring of
#    "Googlebot-Mobile", so test 10 works just like test 9.
doc = """
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(9, doc, good, bad, agent="Googlebot")

good = []
bad = ['/something.jpg']

RobotTest(10, doc, good, bad, agent="Googlebot-Mobile")

# 11. Get the order correct.
doc = """
User-agent: Googlebot-Mobile
Allow: /

User-agent: Googlebot
Disallow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(11, doc, good, bad, agent="Googlebot")

good = ['/something.jpg']
bad = []

RobotTest(12, doc, good, bad, agent="Googlebot-Mobile")


# 13. Google also got the order wrong in #8. You need to specify the
#     URLs from more specific to more general.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(13, doc, good, bad, agent="googlebot")


# 14. For issue #6325 (query string support)
doc = """
User-agent: *
Disallow: /some/path?name=value
"""

good = ['/some/path']
bad = ['/some/path?name=value']

RobotTest(14, doc, good, bad)

# 15. For issue #4108 (obey first * entry)
doc = """
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
"""

good = ['/another/path']
bad = ['/some/path']

RobotTest(15, doc, good, bad)


class NetworkTestCase(unittest.TestCase):
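    """Tests that fetch robots.txt from live sites (need the 'network' resource)."""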

    def testPasswordProtectedSite(self):
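        # The robots.txt here sits behind HTTP auth; a 401/403 response is
        # expected to make the parser disallow all fetching for the site.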
        test_support.requires('network')
        with test_support.transient_internet('mueblesmoraleda.com'):
            url = 'http://mueblesmoraleda.com'
            robots_url = url + "/robots.txt"
            # First check the URL is usable for our purposes, since the
            # test site is a bit flaky.
            try:
                urlopen(robots_url)
            except HTTPError as e:
                if e.code not in {401, 403}:
                    self.skipTest(
                        "%r should return a 401 or 403 HTTP error, not %r"
                        % (robots_url, e.code))
            else:
                self.skipTest(
                    "%r should return a 401 or 403 HTTP error, not succeed"
                    % (robots_url))
            parser = robotparser.RobotFileParser()
            parser.set_url(url)
            try:
                parser.read()
            except IOError:
                self.skipTest('%s is unavailable' % url)
            self.assertEqual(parser.can_fetch("*", robots_url), False)

    def testPythonOrg(self):
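        # www.python.org serves a normal robots.txt; any agent should be
        # allowed to fetch the robots.txt file itself.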
        test_support.requires('network')
        with test_support.transient_internet('www.python.org'):
            parser = robotparser.RobotFileParser(
                "http://www.python.org/robots.txt")
            parser.read()
            self.assertTrue(
                parser.can_fetch("*", "http://www.python.org/robots.txt"))


def test_main():
    test_support.run_unittest(tests)
    test_support.run_unittest(NetworkTestCase)

if __name__=='__main__':
    test_support.verbose = 1
    test_main()