import io
import unittest
import urllib.robotparser
from test import support

class RobotTestCase(unittest.TestCase):
    def __init__(self, index, parser, url, good, agent):
        unittest.TestCase.__init__(self)
        if good:
            self.str = "RobotTest(%d, good, %s)" % (index, url)
        else:
            self.str = "RobotTest(%d, bad, %s)" % (index, url)
        self.parser = parser
        self.url = url
        self.good = good
        self.agent = agent

    def runTest(self):
        if isinstance(self.url, tuple):
            agent, url = self.url
        else:
            url = self.url
            agent = self.agent
        if self.good:
            self.assertTrue(self.parser.can_fetch(agent, url))
        else:
            self.assertFalse(self.parser.can_fetch(agent, url))

    def __str__(self):
        return self.str

tests = unittest.TestSuite()

def RobotTest(index, robots_txt, good_urls, bad_urls,
              agent="test_robotparser"):

    lines = io.StringIO(robots_txt).readlines()
    parser = urllib.robotparser.RobotFileParser()
    parser.parse(lines)
    for url in good_urls:
        tests.addTest(RobotTestCase(index, parser, url, 1, agent))
    for url in bad_urls:
        tests.addTest(RobotTestCase(index, parser, url, 0, agent))

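# For reference, the parser calls that RobotTest() wires together, shown in
# isolation (a minimal sketch, not executed as part of the suite; the sample
# rules and URLs below are made up for illustration):
#
#     rp = urllib.robotparser.RobotFileParser()
#     rp.parse(["User-agent: *", "Disallow: /tmp/"])
#     rp.can_fetch("test_robotparser", "/tmp/x.html")    # expected: False
#     rp.can_fetch("test_robotparser", "/index.html")    # expected: True
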
# Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)

# 1.
doc = """
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""

good = ['/', '/test.html']
bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']

RobotTest(1, doc, good, bad)

# 2.
doc = """
# robots.txt for http://www.example.com/

User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:

"""

good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
bad = ['/cyberworld/map/index.html']

RobotTest(2, doc, good, bad)
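
# Note on test 2: an (agent, url) tuple in `good` overrides the default agent
# for that single check (see RobotTestCase.runTest).  The dedicated
# "cybermapper" entry, whose empty Disallow value means "allow everything",
# takes precedence over the "*" entry for that robot.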

# 3.
doc = """
# go away
User-agent: *
Disallow: /
"""

good = []
bad = ['/cyberworld/map/index.html', '/', '/tmp/']

RobotTest(3, doc, good, bad)

# Examples from http://www.robotstxt.org/wc/norobots-rfc.html (fetched 2002)

# 4.
doc = """
User-agent: figtree
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""

good = []  # XFAIL: '/a/b.html'
bad = ['/tmp', '/tmp.html', '/tmp/a.html',
       '/a%3cd.html', '/a%3Cd.html', '/a%2fb.html',
       '/~joe/index.html']

RobotTest(4, doc, good, bad, 'figtree')
RobotTest(5, doc, good, bad, 'FigTree Robot libwww-perl/5.04')
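
# About the XFAIL above (and the one in test 6 below): per the RFC draft,
# "%2f" in a rule denotes a literal slash inside a path segment, so
# "Disallow: /a%2fb.html" should not block "/a/b.html".  RobotFileParser,
# however, appears to unquote percent-escapes both when parsing rule paths
# and when normalising the requested URL, so the two spellings collapse into
# the same path and the distinction is lost.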

# 6.
doc = """
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
"""

good = ['/tmp']  # XFAIL: '/a%2fb.html'
bad = ['/tmp/', '/tmp/a.html',
       '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
       '/%7Ejoe/index.html']

RobotTest(6, doc, good, bad)

# From bug report #523041

# 7.
doc = """
User-Agent: *
Disallow: /.
"""

good = ['/foo.html']
bad = []  # Bug report says "/" should be denied, but that is not in the RFC

RobotTest(7, doc, good, bad)
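
# "Disallow: /." is matched as a plain path prefix, so it only blocks URLs
# whose path literally starts with "/."; neither "/" nor "/foo.html" does,
# which is why "/foo.html" stays allowed and `bad` stays empty.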

# From Google: http://www.google.com/support/webmasters/bin/answer.py?hl=en&answer=40364

# 8.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(8, doc, good, bad, agent="Googlebot")

# 9.  This robots.txt is ineffective: because "Googlebot" is a substring of
#     "Googlebot-Mobile", the first entry matches both agents, and test 10
#     behaves just like test 9.
doc = """
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(9, doc, good, bad, agent="Googlebot")

good = []
bad = ['/something.jpg']

RobotTest(10, doc, good, bad, agent="Googlebot-Mobile")
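
# A rough sketch of the matching behind tests 9 and 10, using internal,
# undocumented attributes of RobotFileParser (purely for illustration):
# the parser uses the first entry, in file order, whose User-agent token
# occurs case-insensitively inside the client's agent string.
#
#     for entry in parser.entries:                  # file order
#         if entry.applies_to("Googlebot-Mobile"):  # "googlebot" in "googlebot-mobile"
#             break                                 # -> the "Disallow: /" entry wins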

# 11.  Get the order correct: list the more specific entry first.
doc = """
User-agent: Googlebot-Mobile
Allow: /

User-agent: Googlebot
Disallow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(11, doc, good, bad, agent="Googlebot")

good = ['/something.jpg']
bad = []

RobotTest(12, doc, good, bad, agent="Googlebot-Mobile")


# 13.  Google also got the order wrong in #8.  You need to specify the
#      URLs from more specific to more general.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(13, doc, good, bad, agent="googlebot")
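
# Why the order matters: the parser appears to honour the first rule line
# whose path is a prefix of the requested path, so the specific
# "Allow: /folder1/myfile.html" must come before "Disallow: /folder1/";
# with the lines swapped, the Disallow would match first and the Allow
# would never be consulted.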


class NetworkTestCase(unittest.TestCase):

    def testPasswordProtectedSite(self):
        if not support.is_resource_enabled('network'):
            return
        # whole site is password-protected.
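        # As of this version of urllib.robotparser, read() treats a 401/403
        # answer for robots.txt as "disallow everything", so can_fetch()
        # should refuse every URL on the site, including robots.txt itself.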
        url = 'http://mueblesmoraleda.com'
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertFalse(parser.can_fetch("*", url + "/robots.txt"))

    def testPythonOrg(self):
        if not support.is_resource_enabled('network'):
            return
        parser = urllib.robotparser.RobotFileParser(
            "http://www.python.org/robots.txt")
        parser.read()
        self.assertTrue(parser.can_fetch("*",
                                         "http://www.python.org/robots.txt"))

def test_main():
    support.run_unittest(NetworkTestCase)
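    # The module-level `tests` suite was populated at import time by the
    # RobotTest() calls above.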
    support.run_unittest(tests)

if __name__ == '__main__':
    support.verbose = 1
    test_main()