import unittest, StringIO, robotparser
from test import test_support

class RobotTestCase(unittest.TestCase):
    def __init__(self, index, parser, url, good, agent):
        unittest.TestCase.__init__(self)
        if good:
            self.str = "RobotTest(%d, good, %s)" % (index, url)
        else:
            self.str = "RobotTest(%d, bad, %s)" % (index, url)
        self.parser = parser
        self.url = url
        self.good = good
        self.agent = agent

    def runTest(self):
        if isinstance(self.url, tuple):
            agent, url = self.url
        else:
            url = self.url
            agent = self.agent
        if self.good:
            self.assertTrue(self.parser.can_fetch(agent, url))
        else:
            self.assertFalse(self.parser.can_fetch(agent, url))

    def __str__(self):
        return self.str

tests = unittest.TestSuite()

def RobotTest(index, robots_txt, good_urls, bad_urls,
              agent="test_robotparser"):
    lines = StringIO.StringIO(robots_txt).readlines()
    parser = robotparser.RobotFileParser()
    parser.parse(lines)
    for url in good_urls:
        tests.addTest(RobotTestCase(index, parser, url, 1, agent))
    for url in bad_urls:
        tests.addTest(RobotTestCase(index, parser, url, 0, agent))

# Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)

# 1.
doc = """
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""

good = ['/', '/test.html']
bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']

RobotTest(1, doc, good, bad)

# 2.
doc = """
# robots.txt for http://www.example.com/

User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:

"""

good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
bad = ['/cyberworld/map/index.html']

RobotTest(2, doc, good, bad)

# 3.
doc = """
# go away
User-agent: *
Disallow: /
"""

good = []
bad = ['/cyberworld/map/index.html', '/', '/tmp/']

RobotTest(3, doc, good, bad)

# Examples from http://www.robotstxt.org/wc/norobots-rfc.html (fetched 2002)

# 4.
doc = """
User-agent: figtree
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""

good = []  # XFAIL '/a/b.html'
bad = ['/tmp', '/tmp.html', '/tmp/a.html',
       '/a%3cd.html', '/a%3Cd.html', '/a%2fb.html',
       '/~joe/index.html']

RobotTest(4, doc, good, bad, 'figtree')
RobotTest(5, doc, good, bad, 'FigTree Robot libwww-perl/5.04')

# 6.
doc = """
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
"""

good = ['/tmp']  # XFAIL: '/a%2fb.html'
bad = ['/tmp/', '/tmp/a.html',
       '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
       '/%7Ejoe/index.html']

RobotTest(6, doc, good, bad)
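
# Note on the %-escape cases in tests 4-6: this implementation
# normalises both rule paths and candidate URLs, roughly
# urllib.quote(urllib.unquote(path)), so '%3c' and '%3C' compare equal
# and '%7e' matches '~'.  The same step collapses '%2f' into '/', which
# is why the RFC cases marked XFAIL above cannot be told apart here:
#
#     urllib.quote(urllib.unquote('/a%3cd.html'))  # -> '/a%3Cd.html'
#     urllib.quote(urllib.unquote('/a%2fb.html'))  # -> '/a/b.html'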

# From bug report #523041

# 7.
doc = """
User-Agent: *
Disallow: /.
"""

good = ['/foo.html']
bad = []  # Bug report says "/" should be denied, but that is not in the RFC

RobotTest(7, doc, good, bad)
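
# A sketch (not one of the numbered tests), assuming the prefix matching
# used by this robotparser implementation: "/" does not start with the
# rule path "/.", so the root itself stays fetchable.
_p7 = robotparser.RobotFileParser()
_p7.parse(StringIO.StringIO(doc).readlines())
assert _p7.can_fetch("*", "/")
assert _p7.can_fetch("*", "/foo.html")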

# From Google: http://www.google.com/support/webmasters/bin/answer.py?hl=en&answer=40364

# 8.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(8, doc, good, bad, agent="Googlebot")

# 9. This file is incorrect because "Googlebot" is a substring of
# "Googlebot-Mobile", so test 10 works just like test 9.
doc = """
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(9, doc, good, bad, agent="Googlebot")

good = []
bad = ['/something.jpg']

RobotTest(10, doc, good, bad, agent="Googlebot-Mobile")
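
# A sketch of the substring matching described above: the "Googlebot"
# group is the first match for the agent name "Googlebot-Mobile" (the
# comparison is a case-insensitive substring test in this
# implementation), so its Disallow applies.
_p9 = robotparser.RobotFileParser()
_p9.parse(StringIO.StringIO(doc).readlines())
assert not _p9.can_fetch("Googlebot-Mobile", "/something.jpg")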

# 11. Get the order correct: the Googlebot-Mobile group must come first,
# since "Googlebot" would otherwise match the agent name before it.
doc = """
User-agent: Googlebot-Mobile
Allow: /

User-agent: Googlebot
Disallow: /
"""

good = []
bad = ['/something.jpg']

RobotTest(11, doc, good, bad, agent="Googlebot")

good = ['/something.jpg']
bad = []

RobotTest(12, doc, good, bad, agent="Googlebot-Mobile")
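
# A sketch: with the Googlebot-Mobile group listed first, it is matched
# before the broader "Googlebot" group, so the Allow wins for that agent.
_p11 = robotparser.RobotFileParser()
_p11.parse(StringIO.StringIO(doc).readlines())
assert _p11.can_fetch("Googlebot-Mobile", "/something.jpg")
assert not _p11.can_fetch("Googlebot", "/something.jpg")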


# 13. Google also got the order wrong in #8: you need to list the URLs
# from more specific to more general.  Same file as #8, but the agent
# name is lower-cased here to exercise case-insensitive matching.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""

good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']

RobotTest(13, doc, good, bad, agent="googlebot")
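
# A sketch: within a group the first matching rule line wins in this
# implementation, so the Allow for /folder1/myfile.html is applied
# before the Disallow for /folder1/, and the lower-cased agent name
# still matches "Googlebot".
_p13 = robotparser.RobotFileParser()
_p13.parse(StringIO.StringIO(doc).readlines())
assert _p13.can_fetch("googlebot", "/folder1/myfile.html")
assert not _p13.can_fetch("googlebot", "/folder1/anotherfile.html")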


class TestCase(unittest.TestCase):
    def runTest(self):
        test_support.requires('network')
        # The whole site is password-protected.
        url = 'http://mueblesmoraleda.com'
        parser = robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertEqual(parser.can_fetch("*", url + "/robots.txt"), False)

def test_main():
    test_support.run_unittest(tests)
    TestCase().run()

if __name__ == '__main__':
    test_support.verbose = 1
    test_main()