blob: 9dab4c1c3a8880c5d306030610d9accad528bf4e [file] [log] [blame]
Skip Montanaro663f6c22001-01-20 15:59:25 +00001""" robotparser.py
Guido van Rossumbbf8c2f1997-01-30 03:18:23 +00002
Skip Montanaro663f6c22001-01-20 15:59:25 +00003 Copyright (C) 2000 Bastian Kleineidam
Guido van Rossumbbf8c2f1997-01-30 03:18:23 +00004
Skip Montanaro663f6c22001-01-20 15:59:25 +00005 You can choose between two licenses when using this package:
6 1) GNU GPLv2
Martin v. Löwisd22368f2002-03-18 10:41:20 +00007 2) PSF license for Python 2.2
Skip Montanaro663f6c22001-01-20 15:59:25 +00008
9 The robots.txt Exclusion Protocol is implemented as specified in
Raymond Hettinger122541b2014-05-12 21:56:33 -070010 http://www.robotstxt.org/norobots-rfc.txt
Guido van Rossumbbf8c2f1997-01-30 03:18:23 +000011"""
Jeremy Hylton1afc1692008-06-18 20:49:58 +000012
import collections
import urllib.error
import urllib.parse
import urllib.request
Skip Montanaro663f6c22001-01-20 15:59:25 +000016
Skip Montanaroe99d5ea2001-01-20 19:54:20 +000017__all__ = ["RobotFileParser"]
18
class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        self.entries = []          # Entry objects for specific user-agents
        self.default_entry = None  # Entry for the "*" user-agent, if present
        self.disallow_all = False  # True after a 401/403 fetch: deny everything
        self.allow_all = False     # True after another 4xx fetch: allow everything
        self.set_url(url)
        self.last_checked = 0      # 0 means the file was never fetched

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        # Keep only the netloc and path components for later use.
        self.host, self.path = urllib.parse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        try:
            f = urllib.request.urlopen(self.url)
        except urllib.error.HTTPError as err:
            if err.code in (401, 403):
                # Access to robots.txt itself is forbidden: assume
                # everything is disallowed.
                self.disallow_all = True
            elif err.code >= 400 and err.code < 500:
                # Any other 4xx (typically 404): no robots.txt exists,
                # so everything is allowed.
                self.allow_all = True
            # NOTE(review): 5xx responses leave both flags unset, so
            # can_fetch() keeps returning False until a successful read().
        else:
            # Close the response even if parsing raises.
            with f:
                raw = f.read()
            self.parse(raw.decode("utf-8").splitlines())

    def _add_entry(self, entry):
        """File *entry* under the default slot or the ordered entry list."""
        if "*" in entry.useragents:
            # the default entry is considered last
            if self.default_entry is None:
                # the first default entry wins
                self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """Parse the input lines from a robots.txt file.

        We allow that a user-agent: line is not preceded by
        one or more blank lines.
        """
        # states:
        #   0: start state
        #   1: saw user-agent line
        #   2: saw an allow or disallow line
        state = 0
        entry = Entry()

        self.modified()
        for line in lines:
            if not line:
                if state == 1:
                    # Blank line right after user-agent(s): record had no
                    # rules, discard it.
                    entry = Entry()
                    state = 0
                elif state == 2:
                    # Blank line terminates a complete record.
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.parse.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        # A new record starts without a separating blank line.
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
                elif line[0] == "crawl-delay":
                    if state != 0:
                        # before trying to convert to int we need to make
                        # sure that robots.txt has valid syntax otherwise
                        # it will crash
                        if line[1].strip().isdigit():
                            entry.delay = int(line[1])
                        state = 2
                elif line[0] == "request-rate":
                    if state != 0:
                        numbers = line[1].split('/')
                        # check if all values are sane
                        if (len(numbers) == 2 and numbers[0].strip().isdigit()
                                and numbers[1].strip().isdigit()):
                            req_rate = collections.namedtuple(
                                'req_rate', 'requests seconds')
                            # Instantiate the named tuple.  The old code
                            # assigned the *class* and then set class
                            # attributes, which shadowed the field
                            # descriptors and stored a non-tuple object.
                            entry.req_rate = req_rate(int(numbers[0]),
                                                      int(numbers[1]))
                        state = 2
        if state == 2:
            # Flush the final record (no trailing blank line required).
            self._add_entry(entry)

    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # Until the robots.txt file has been read or found not
        # to exist, we must assume that no url is allowable.
        # This prevents false positives when a user erroneously
        # calls can_fetch() before calling read().
        if not self.last_checked:
            return False
        # search for given user agent matches
        # the first match counts
        parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
        url = urllib.parse.urlunparse(('', '', parsed_url.path,
            parsed_url.params, parsed_url.query, parsed_url.fragment))
        url = urllib.parse.quote(url)
        if not url:
            url = "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def crawl_delay(self, useragent):
        """Return the Crawl-delay for *useragent*, or None if unknown."""
        if not self.mtime():
            return None
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.delay
        # Guard against a missing default entry: the old code raised
        # AttributeError here when robots.txt had no "*" record.
        if self.default_entry:
            return self.default_entry.delay
        return None

    def request_rate(self, useragent):
        """Return the Request-rate for *useragent*, or None if unknown."""
        if not self.mtime():
            return None
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.req_rate
        # Same None guard as crawl_delay().
        if self.default_entry:
            return self.default_entry.req_rate
        return None

    def __str__(self):
        return ''.join([str(entry) + "\n" for entry in self.entries])
Skip Montanaro663f6c22001-01-20 15:59:25 +0000195
196
class RuleLine:
    """A single "Allow:" (allowance == True) or "Disallow:"
    (allowance == False) directive followed by a path prefix."""

    def __init__(self, path, allowance):
        # An empty "Disallow:" value means everything is allowed.
        if not allowance and path == '':
            allowance = True
        # Normalize the path via a parse/unparse round trip, then
        # percent-encode it so comparisons match encoded request paths.
        normalized = urllib.parse.urlunparse(urllib.parse.urlparse(path))
        self.path = urllib.parse.quote(normalized)
        self.allowance = allowance

    def applies_to(self, filename):
        """Return True if this rule's path is a prefix of *filename*."""
        if self.path == "*":
            return True
        return filename.startswith(self.path)

    def __str__(self):
        verb = "Allow" if self.allowance else "Disallow"
        return verb + ": " + self.path
Skip Montanaro663f6c22001-01-20 15:59:25 +0000213
214
class Entry:
    """An entry has one or more user-agents and zero or more rulelines"""

    def __init__(self):
        self.useragents = []   # agent name tokens this record applies to
        self.rulelines = []    # RuleLine objects, in file order
        self.delay = None      # Crawl-delay value, if one was given
        self.req_rate = None   # Request-rate value, if one was given

    def __str__(self):
        parts = []
        for agent in self.useragents:
            parts.append("User-agent: " + agent + "\n")
        for rule in self.rulelines:
            parts.append(str(rule) + "\n")
        return ''.join(parts)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # Compare only the name token (text before any "/version"),
        # case-insensitively.
        name = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # we have the catch-all agent
                return True
            if agent.lower() in name:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        # First matching rule wins; no match means allowed.
        for rule in self.rulelines:
            if rule.applies_to(filename):
                return rule.allowance
        return True