""" robotparser.py

    Copyright (C) 2000  Bastian Kleineidam

    You can choose between two licenses when using this package:
    1) GNU GPLv2
    2) PSF license for Python 2.2

    The robots.txt Exclusion Protocol is implemented as specified in
    http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
"""
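# For reference, a robots.txt file of the kind this module parses groups one
# or more "User-agent:" lines with the "Allow:"/"Disallow:" rules that follow
# them.  A minimal, made-up example (not taken from any real site):
#
#   User-agent: CherryPickerSE
#   Disallow: /cgi-bin/
#
#   User-agent: *
#   Disallow: /private/
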
import urlparse, urllib

__all__ = ["RobotFileParser"]

debug = 0

def _debug(msg):
    if debug: print msg


class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

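    # A minimal usage sketch (illustrative only; the host and paths below are
    # made up and not taken from any real site):
    #
    #   rp = RobotFileParser()
    #   rp.set_url('http://www.example.com/robots.txt')
    #   rp.read()
    #   rp.can_fetch('MyCrawler/1.0', 'http://www.example.com/private/page.html')
    #
    # can_fetch() returns True or False depending on the rules found in the
    # fetched robots.txt.
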
    def __init__(self, url=''):
        self.entries = []
        self.default_entry = None
        self.disallow_all = False
        self.allow_all = False
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
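        # urlparse splits the URL into components; we keep the network
        # location and the path.  For example (illustrative URL only),
        # 'http://www.example.com/robots.txt' yields host 'www.example.com'
        # and path '/robots.txt'.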
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        opener = URLopener()
        f = opener.open(self.url)
        lines = []
        line = f.readline()
        while line:
            lines.append(line.strip())
            line = f.readline()
        self.errcode = opener.errcode
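        # Status-code policy (summary of the checks below): 401/403 mean the
        # robots.txt is access-protected, so be conservative and disallow
        # everything; any other 4xx/5xx means there is no usable robots.txt,
        # so allow everything; a 200 response with content gets parsed.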
        if self.errcode == 401 or self.errcode == 403:
            self.disallow_all = True
            _debug("disallow all")
        elif self.errcode >= 400:
            self.allow_all = True
            _debug("allow all")
        elif self.errcode == 200 and lines:
            _debug("parse lines")
            self.parse(lines)

    def _add_entry(self, entry):
        if "*" in entry.useragents:
            # the default entry is considered last
            self.default_entry = entry
        else:
            self.entries.append(entry)

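    # parse() below is a small line-by-line state machine:
    #   state 0 -- start of file or after a blank line; a user-agent: line
    #              is expected next
    #   state 1 -- one or more user-agent: lines seen; allow:/disallow:
    #              rules or further user-agent: lines may follow
    #   state 2 -- rule lines seen; a blank line or a new user-agent: line
    #              finishes the current entry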
    def parse(self, lines):
        """Parse the input lines from a robots.txt file.
           We allow a user-agent: line that is not preceded by
           one or more blank lines."""
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            linenumber = linenumber + 1
            if not line:
                if state==1:
                    _debug("line %d: warning: you should insert"
                           " allow: or disallow: directives below any"
                           " user-agent: line" % linenumber)
                    entry = Entry()
                    state = 0
                elif state==2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i>=0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state==2:
                        _debug("line %d: warning: you should insert a blank"
                               " line before any user-agent"
                               " directive" % linenumber)
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state==0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state==0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
                else:
                    _debug("line %d: warning: unknown key %s" % (linenumber,
                           line[0]))
            else:
                _debug("line %d: error: malformed line %s"%(linenumber, line))
        if state==2:
            self.entries.append(entry)
        _debug("Parsed rules:\n%s" % str(self))


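    # Worked example for can_fetch() (illustrative robots.txt, made-up host):
    # given the rules
    #
    #   User-agent: *
    #   Disallow: /private/
    #
    # can_fetch('AnyBot', 'http://www.example.com/private/x.html') returns
    # False, while can_fetch('AnyBot', 'http://www.example.com/index.html')
    # returns True.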
    def can_fetch(self, useragent, url):
        """Using the parsed robots.txt, decide if useragent can fetch url."""
        _debug("Checking robots.txt allowance for:\n  user agent: %s\n  url: %s" %
               (useragent, url))
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
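        # Reduce the URL to a quoted path before matching against the rules.
        # For example (illustrative), 'http://www.example.com/a%20b.html'
        # becomes '/a%20b.html'; an empty path becomes '/'.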
        url = urllib.quote(urlparse.urlparse(urllib.unquote(url))[2]) or "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True


    def __str__(self):
        ret = ""
        for entry in self.entries:
            ret = ret + str(entry) + "\n"
        return ret


class RuleLine:
    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
       (allowance==False) followed by a path."""
    def __init__(self, path, allowance):
        if path == '' and not allowance:
            # an empty value means allow all
            allowance = True
        self.path = urllib.quote(path)
        self.allowance = allowance

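    # applies_to() does a simple prefix match on the (already quoted) path:
    # e.g. (illustrative) RuleLine('/cgi-bin/', False) applies to
    # '/cgi-bin/search' but not to '/cgi.html'.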
    def applies_to(self, filename):
        return self.path=="*" or filename.startswith(self.path)

    def __str__(self):
        return (self.allowance and "Allow" or "Disallow")+": "+self.path


class Entry:
    """An entry has one or more user-agents and zero or more rulelines"""
    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        ret = ""
        for agent in self.useragents:
            ret = ret + "User-agent: "+agent+"\n"
        for line in self.rulelines:
            ret = ret + str(line) + "\n"
        return ret

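    # User-agent matching keeps only the name token before any "/" and is
    # case-insensitive, using a substring test.  For example (illustrative),
    # 'CherryPickerSE/1.0' is reduced to 'cherrypickerse', which an entry for
    # 'CherryPicker' matches because 'cherrypicker' occurs in 'cherrypickerse'.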
    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # split the name token and make it lower case
        useragent = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent=='*':
                # we have the catch-all agent
                return True
            agent = agent.lower()
            if agent in useragent:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is a URL-quoted path, as prepared by can_fetch()"""
        for line in self.rulelines:
            _debug((filename, str(line), line.allowance))
            if line.applies_to(filename):
                return line.allowance
        return True

class URLopener(urllib.FancyURLopener):
    def __init__(self, *args):
        urllib.FancyURLopener.__init__(self, *args)
        self.errcode = 200

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        self.errcode = errcode
        return urllib.FancyURLopener.http_error_default(self, url, fp, errcode,
                                                        errmsg, headers)

def _check(a, b):
    if not b:
        ac = "access denied"
    else:
        ac = "access allowed"
    if a!=b:
        print "failed"
    else:
        print "ok (%s)" % ac
    print

def _test():
    global debug
    rp = RobotFileParser()
    debug = 1

    # robots.txt that exists, gotten to by redirection
    rp.set_url('http://www.musi-cal.com/robots.txt')
    rp.read()

    # test for re.escape
    _check(rp.can_fetch('*', 'http://www.musi-cal.com/'), 1)
    # this should match the first rule, which is a disallow
    _check(rp.can_fetch('', 'http://www.musi-cal.com/'), 0)
    # various cherry pickers
    _check(rp.can_fetch('CherryPickerSE',
                        'http://www.musi-cal.com/cgi-bin/event-search'
                        '?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.0',
                        'http://www.musi-cal.com/cgi-bin/event-search'
                        '?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.5',
                        'http://www.musi-cal.com/cgi-bin/event-search'
                        '?city=San+Francisco'), 0)
    # case sensitivity
    _check(rp.can_fetch('ExtractorPro', 'http://www.musi-cal.com/blubba'), 0)
    _check(rp.can_fetch('extractorpro', 'http://www.musi-cal.com/blubba'), 0)
    # substring test
    _check(rp.can_fetch('toolpak/1.1', 'http://www.musi-cal.com/blubba'), 0)
    # tests for catch-all * agent
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/search'), 0)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/Musician/me'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)

    # robots.txt that does not exist
    rp.set_url('http://www.lycos.com/robots.txt')
    rp.read()
    _check(rp.can_fetch('Mozilla', 'http://www.lycos.com/search'), 1)

if __name__ == '__main__':
    _test()