blob: d1ef46019bb9d74058cbad89e8ca317846bced17 [file] [log] [blame]
""" robotparser.py

    Copyright (C) 2000 Bastian Kleineidam

    You can choose between two licenses when using this package:
    1) GNU GPLv2
    2) PSF license for Python 2.2

    The robots.txt Exclusion Protocol is implemented as specified in
    http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
"""
Christian Heimes81ee3ef2008-05-04 22:42:01 +000012import urlparse
13import urllib
Skip Montanaro663f6c22001-01-20 15:59:25 +000014
Skip Montanaroe99d5ea2001-01-20 19:54:20 +000015__all__ = ["RobotFileParser"]
16
class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        # entries: one Entry per explicit User-agent group;
        # default_entry: the catch-all "*" group, consulted last.
        self.entries = []
        self.default_entry = None
        self.disallow_all = False
        self.allow_all = False
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        opener = URLopener()
        f = opener.open(self.url)
        lines = []
        line = f.readline()
        while line:
            lines.append(line.strip())
            line = f.readline()
        # Fix: close the handle so the connection/file is not leaked.
        f.close()
        self.errcode = opener.errcode
        if self.errcode in (401, 403):
            # Authorization required -- treat everything as disallowed.
            self.disallow_all = True
        elif self.errcode >= 400:
            # No robots.txt (or other client/server error) -- allow all.
            self.allow_all = True
        elif self.errcode == 200 and lines:
            self.parse(lines)

    def _add_entry(self, entry):
        """File a finished entry: "*" groups become the default entry,
        everything else is an ordinary entry."""
        if "*" in entry.useragents:
            # the default entry is considered last
            self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """parse the input lines from a robots.txt file.
        We allow that a user-agent: line is not preceded by
        one or more blank lines."""
        # state machine: 0 = expecting a record, 1 = inside the
        # User-agent lines of a record, 2 = inside its rule lines.
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            linenumber = linenumber + 1
            if not line:
                if state == 1:
                    # user-agent lines with no rules: discard the entry
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        # a new record starts; file the finished one
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        # Fix: an Allow line also completes a record;
                        # without this, an entry containing only Allow
                        # lines was silently dropped.
                        state = 2
        if state == 2:
            # Fix: route the trailing entry through _add_entry so a final
            # "User-agent: *" group becomes the default entry instead of
            # being treated as an ordinary one.
            self._add_entry(entry)

    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
        url = urllib.quote(urlparse.urlparse(urllib.unquote(url))[2]) or "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def __str__(self):
        return ''.join([str(entry) + "\n" for entry in self.entries])
Skip Montanaro663f6c22001-01-20 15:59:25 +0000144
145
class RuleLine:
    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
    (allowance==False) followed by a path."""

    def __init__(self, path, allowance):
        if path == '' and not allowance:
            # an empty Disallow value means "allow everything"
            allowance = True
        self.path = urllib.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        """Return whether this rule covers the given (quoted) path."""
        if self.path == "*":
            return True
        return filename.startswith(self.path)

    def __str__(self):
        verb = "Allow" if self.allowance else "Disallow"
        return "%s: %s" % (verb, self.path)
Skip Montanaro663f6c22001-01-20 15:59:25 +0000161
162
class Entry:
    """An entry has one or more user-agents and zero or more rulelines"""

    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        pieces = []
        for name in self.useragents:
            pieces.append("User-agent: %s\n" % name)
        for rule in self.rulelines:
            pieces.append("%s\n" % rule)
        return ''.join(pieces)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # only the name token (before any "/version") matters,
        # compared case-insensitively
        token = useragent.split("/")[0].lower()
        for name in self.useragents:
            if name == '*':
                # the catch-all agent matches every client
                return True
            if name.lower() in token:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        # first matching rule wins; no matching rule means allowed
        for rule in self.rulelines:
            if rule.applies_to(filename):
                return rule.allowance
        return True
Skip Montanaro663f6c22001-01-20 15:59:25 +0000198
class URLopener(urllib.FancyURLopener):
    """FancyURLopener variant that records the last HTTP status code
    and never prompts interactively for credentials."""

    def __init__(self, *args):
        urllib.FancyURLopener.__init__(self, *args)
        # assume success until an error handler reports otherwise
        self.errcode = 200

    def prompt_user_passwd(self, host, realm):
        ## If robots.txt file is accessible only with a password,
        ## we act as if the file wasn't there.
        return None, None

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        # remember the status for the caller, then fall back to the
        # base class's default error handling
        self.errcode = errcode
        return urllib.FancyURLopener.http_error_default(
            self, url, fp, errcode, errmsg, headers)