""" robotparser.py

    Copyright (C) 2000  Bastian Kleineidam

    You can choose between two licenses when using this package:
    1) GNU GPLv2
    2) PSF license for Python 2.2

    The robots.txt Exclusion Protocol is implemented as specified in
    http://www.robotstxt.org/norobots-rfc.txt
"""

import collections
import urllib.error
import urllib.parse
import urllib.request

__all__ = ["RobotFileParser"]

RequestRate = collections.namedtuple("RequestRate", "requests seconds")

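# For reference, the directives understood by the parser below, shown as an
# illustrative (hypothetical) robots.txt; records are separated by blank
# lines, and a Sitemap line may appear anywhere:
#
#     User-agent: *
#     Disallow: /private/
#     Allow: /private/readme.html
#     Crawl-delay: 2
#     Request-rate: 3/10
#
#     Sitemap: https://example.com/sitemap.xml
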
class RobotFileParser:
    """This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        self.entries = []
        self.sitemaps = []
        self.default_entry = None
        self.disallow_all = False
        self.allow_all = False
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()
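    # A long-running crawler can combine mtime() and modified() to refresh
    # its copy periodically. A hedged sketch (the one-hour cutoff is an
    # arbitrary choice, not part of this module):
    #
    #     if time.time() - rp.mtime() > 3600:
    #         rp.read()  # re-fetch and re-parse robots.txt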

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        self.host, self.path = urllib.parse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        try:
            f = urllib.request.urlopen(self.url)
        except urllib.error.HTTPError as err:
            if err.code in (401, 403):
                self.disallow_all = True
            elif 400 <= err.code < 500:
                self.allow_all = True
        else:
            raw = f.read()
            self.parse(raw.decode("utf-8").splitlines())

    def _add_entry(self, entry):
        if "*" in entry.useragents:
            # the default entry is considered last
            if self.default_entry is None:
                # the first default entry wins
                self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """Parse the input lines from a robots.txt file.

        A user-agent line is accepted even when it is not preceded
        by one or more blank lines.
        """
        # states:
        #   0: start state
        #   1: saw user-agent line
        #   2: saw an allow or disallow line
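        # For example, "User-agent: a" followed by "Disallow: /b" moves the
        # parser 0 -> 1 -> 2; a blank line (or end of input) in state 2 then
        # flushes the accumulated entry via _add_entry().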
        state = 0
        entry = Entry()

        self.modified()
        for line in lines:
            if not line:
                if state == 1:
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.parse.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
                elif line[0] == "crawl-delay":
                    if state != 0:
                        # only convert to int when the value is all digits;
                        # a malformed value is ignored rather than raising
                        # ValueError
                        if line[1].strip().isdigit():
                            entry.delay = int(line[1])
                        state = 2
                elif line[0] == "request-rate":
                    if state != 0:
                        numbers = line[1].split('/')
                        # check if all values are sane
                        if (len(numbers) == 2 and numbers[0].strip().isdigit()
                                and numbers[1].strip().isdigit()):
                            entry.req_rate = RequestRate(int(numbers[0]),
                                                         int(numbers[1]))
                        state = 2
                elif line[0] == "sitemap":
                    # According to http://www.sitemaps.org/protocol.html
                    # "This directive is independent of the user-agent line,
                    # so it doesn't matter where you place it in your file."
                    # Therefore we do not change the state of the parser.
                    self.sitemaps.append(line[1])
        if state == 2:
            self._add_entry(entry)

    def can_fetch(self, useragent, url):
        """Using the parsed robots.txt, decide if useragent can fetch url."""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # Until the robots.txt file has been read or found not
        # to exist, we must assume that no url is allowable.
        # This prevents false positives when a user erroneously
        # calls can_fetch() before calling read().
        if not self.last_checked:
            return False
        # search for given user agent matches
        # the first match counts
        parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
        url = urllib.parse.urlunparse(('', '', parsed_url.path,
                                       parsed_url.params, parsed_url.query,
                                       parsed_url.fragment))
        url = urllib.parse.quote(url)
        if not url:
            url = "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def crawl_delay(self, useragent):
        if not self.mtime():
            return None
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.delay
        if self.default_entry:
            return self.default_entry.delay
        return None

    def request_rate(self, useragent):
        if not self.mtime():
            return None
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.req_rate
        if self.default_entry:
            return self.default_entry.req_rate
        return None

    def site_maps(self):
        if not self.sitemaps:
            return None
        return self.sitemaps

    def __str__(self):
        entries = self.entries
        if self.default_entry is not None:
            entries = entries + [self.default_entry]
        return '\n\n'.join(map(str, entries))


class RuleLine:
    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
    (allowance==False) followed by a path."""

    def __init__(self, path, allowance):
        if path == '' and not allowance:
            # an empty value means allow all
            allowance = True
        path = urllib.parse.urlunparse(urllib.parse.urlparse(path))
        self.path = urllib.parse.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        return self.path == "*" or filename.startswith(self.path)

    def __str__(self):
        return ("Allow" if self.allowance else "Disallow") + ": " + self.path


class Entry:
    """An entry has one or more user-agents and zero or more rulelines."""

    def __init__(self):
        self.useragents = []
        self.rulelines = []
        self.delay = None
        self.req_rate = None

    def __str__(self):
        ret = []
        for agent in self.useragents:
            ret.append(f"User-agent: {agent}")
        if self.delay is not None:
            ret.append(f"Crawl-delay: {self.delay}")
        if self.req_rate is not None:
            rate = self.req_rate
            ret.append(f"Request-rate: {rate.requests}/{rate.seconds}")
        ret.extend(map(str, self.rulelines))
        return '\n'.join(ret)

    def applies_to(self, useragent):
        """Check if this entry applies to the specified agent."""
        # split the name token and make it lower case
        useragent = useragent.split("/")[0].lower()
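        # e.g. "Googlebot/2.1 (+http://www.google.com/bot.html)" reduces to
        # "googlebot"; a stored agent token matches if it is a substring of
        # that reduced name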
        for agent in self.useragents:
            if agent == '*':
                # we have the catch-all agent
                return True
            agent = agent.lower()
            if agent in useragent:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        for line in self.rulelines:
            if line.applies_to(filename):
                return line.allowance
        return True
Martin v. Löwis31bd5292004-08-23 20:42:35 +0000273 return True