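"""Tests for urllib.robotparser (RobotFileParser)."""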
import io
import unittest
import urllib.robotparser
from test import support


class RobotTestCase(unittest.TestCase):
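    """One can_fetch() check: `url` is expected to be allowed for `agent`
    when `good` is true, and disallowed otherwise."""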
    def __init__(self, index, parser, url, good, agent):
        unittest.TestCase.__init__(self)
        if good:
            self.str = "RobotTest(%d, good, %s)" % (index, url)
        else:
            self.str = "RobotTest(%d, bad, %s)" % (index, url)
        self.parser = parser
        self.url = url
        self.good = good
        self.agent = agent

    def runTest(self):
        if isinstance(self.url, tuple):
            agent, url = self.url
        else:
            url = self.url
            agent = self.agent
        if self.good:
            self.assertTrue(self.parser.can_fetch(agent, url))
        else:
            self.assertFalse(self.parser.can_fetch(agent, url))

    def __str__(self):
        return self.str


tests = unittest.TestSuite()


def RobotTest(index, robots_txt, good_urls, bad_urls,
              agent="test_robotparser"):
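    """Parse robots_txt and add one RobotTestCase to the module-level suite
    for each URL in good_urls (expected allowed) and bad_urls (expected
    disallowed).
    """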
    lines = io.StringIO(robots_txt).readlines()
    parser = urllib.robotparser.RobotFileParser()
    parser.parse(lines)
    for url in good_urls:
        tests.addTest(RobotTestCase(index, parser, url, 1, agent))
    for url in bad_urls:
        tests.addTest(RobotTestCase(index, parser, url, 0, agent))


# Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)

# 1.
doc = """
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""

good = ['/','/test.html']
bad = ['/cyberworld/map/index.html','/tmp/xxx','/foo.html']

RobotTest(1, doc, good, bad)

# 2.
doc = """
# robots.txt for http://www.example.com/

User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:

"""

good = ['/','/test.html',('cybermapper','/cyberworld/map/index.html')]
bad = ['/cyberworld/map/index.html']

RobotTest(2, doc, good, bad)

# 3.
doc = """
# go away
User-agent: *
Disallow: /
"""

good = []
bad = ['/cyberworld/map/index.html','/','/tmp/']

RobotTest(3, doc, good, bad)

# Examples from http://www.robotstxt.org/wc/norobots-rfc.html (fetched 2002)

# 4.
doc = """
User-agent: figtree
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""

good = [] # XFAIL '/a/b.html'
bad = ['/tmp','/tmp.html','/tmp/a.html',
       '/a%3cd.html','/a%3Cd.html','/a%2fb.html',
       '/~joe/index.html'
       ]

RobotTest(4, doc, good, bad, 'figtree')
RobotTest(5, doc, good, bad, 'FigTree Robot libwww-perl/5.04')

# 6.
doc = """
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
"""

good = ['/tmp',] # XFAIL: '/a%2fb.html'
bad = ['/tmp/','/tmp/a.html',
       '/a%3cd.html','/a%3Cd.html',"/a/b.html",
       '/%7Ejoe/index.html']

RobotTest(6, doc, good, bad)

# From bug report #523041

# 7.
doc = """
User-Agent: *
Disallow: /.
"""

good = ['/foo.html']
bad = [] # Bug report says "/" should be denied, but that is not in the RFC

RobotTest(7, doc, good, bad)


class TestCase(unittest.TestCase):
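    """Network test: a site whose robots.txt cannot be fetched (here because
    the whole site is password-protected) is treated as disallowing all
    agents."""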
    def runTest(self):
        support.requires('network')
        # whole site is password-protected.
        url = 'http://mueblesmoraleda.com'
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)


def test_main():
    support.run_unittest(tests)
    TestCase().run()


if __name__ == '__main__':
    support.verbose = 1
    test_main()