blob: 4fbb0cb995ff24878a185cb231855359a872e9d3 [file] [log] [blame]
Skip Montanaro663f6c22001-01-20 15:59:25 +00001""" robotparser.py
Guido van Rossumbbf8c2f1997-01-30 03:18:23 +00002
Skip Montanaro663f6c22001-01-20 15:59:25 +00003 Copyright (C) 2000 Bastian Kleineidam
Guido van Rossumbbf8c2f1997-01-30 03:18:23 +00004
Skip Montanaro663f6c22001-01-20 15:59:25 +00005 You can choose between two licenses when using this package:
6 1) GNU GPLv2
Martin v. Löwisd22368f2002-03-18 10:41:20 +00007 2) PSF license for Python 2.2
Skip Montanaro663f6c22001-01-20 15:59:25 +00008
9 The robots.txt Exclusion Protocol is implemented as specified in
Raymond Hettinger122541b2014-05-12 21:56:33 -070010 http://www.robotstxt.org/norobots-rfc.txt
Guido van Rossumbbf8c2f1997-01-30 03:18:23 +000011"""
Jeremy Hylton1afc1692008-06-18 20:49:58 +000012
import time
import urllib.error
import urllib.parse
import urllib.request
Skip Montanaro663f6c22001-01-20 15:59:25 +000014
# Public API of this module; RuleLine and Entry are internal helpers.
__all__ = ["RobotFileParser"]
16
class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        self.entries = []          # Entry objects for named user-agents
        self.default_entry = None  # Entry for "User-agent: *", if present
        self.disallow_all = False  # robots.txt itself was forbidden (401/403)
        self.allow_all = False     # robots.txt does not exist (other 4xx)
        self.set_url(url)
        self.last_checked = 0      # time of last fetch; 0 means never fetched

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        # Keep the netloc and path components around for callers.
        self.host, self.path = urllib.parse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        try:
            f = urllib.request.urlopen(self.url)
        except urllib.error.HTTPError as err:
            if err.code in (401, 403):
                # Access to robots.txt is forbidden: conservatively
                # disallow everything.
                self.disallow_all = True
            elif 400 <= err.code < 500:
                # robots.txt does not exist: everything is allowed.
                self.allow_all = True
            # NOTE(review): 5xx responses are silently ignored, leaving
            # last_checked at 0 so can_fetch() keeps returning False.
        else:
            # Close the response even if decoding raises.
            with f:
                raw = f.read()
            self.parse(raw.decode("utf-8").splitlines())

    def _add_entry(self, entry):
        """File a finished entry: named agents in order, "*" kept aside."""
        if "*" in entry.useragents:
            # the default entry is considered last
            if self.default_entry is None:
                # the first default entry wins
                self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """Parse the input lines from a robots.txt file.

        We allow that a user-agent: line is not preceded by
        one or more blank lines.
        """
        # states:
        #   0: start state
        #   1: saw user-agent line
        #   2: saw an allow or disallow line
        state = 0
        entry = Entry()

        self.modified()
        for line in lines:
            if not line:
                # A blank line terminates the current record.
                if state == 1:
                    # user-agent line(s) with no rules: discard the record
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.parse.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        # New record started without a blank separator.
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
        if state == 2:
            self._add_entry(entry)

    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # Until the robots.txt file has been read or found not
        # to exist, we must assume that no url is allowable.
        # This prevents false positives when a user erroneously
        # calls can_fetch() before calling read().
        if not self.last_checked:
            return False
        # search for given user agent matches
        # the first match counts
        parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
        url = urllib.parse.urlunparse(('', '', parsed_url.path,
                                       parsed_url.params, parsed_url.query,
                                       parsed_url.fragment))
        url = urllib.parse.quote(url)
        if not url:
            url = "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def __str__(self):
        return ''.join(str(entry) + "\n" for entry in self.entries)
Skip Montanaro663f6c22001-01-20 15:59:25 +0000158
159
class RuleLine:
    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
    (allowance==False) followed by a path."""

    def __init__(self, path, allowance):
        if path == '' and not allowance:
            # an empty Disallow value means allow all
            allowance = True
        # Normalize the path through a parse/unparse round trip, then
        # percent-encode it for comparison against quoted request paths.
        normalized = urllib.parse.urlunparse(urllib.parse.urlparse(path))
        self.path = urllib.parse.quote(normalized)
        self.allowance = allowance

    def applies_to(self, filename):
        """True if this rule's path is a prefix of *filename* (or "*")."""
        if self.path == "*":
            return True
        return filename.startswith(self.path)

    def __str__(self):
        verb = "Allow" if self.allowance else "Disallow"
        return "%s: %s" % (verb, self.path)
Skip Montanaro663f6c22001-01-20 15:59:25 +0000176
177
class Entry:
    """An entry has one or more user-agents and zero or more rulelines"""

    def __init__(self):
        self.useragents = []  # agent name strings from "User-agent:" lines
        self.rulelines = []   # RuleLine objects, in file order

    def __str__(self):
        parts = []
        for agent in self.useragents:
            parts.append("User-agent: %s\n" % agent)
        for rule in self.rulelines:
            parts.append("%s\n" % rule)
        return ''.join(parts)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # Compare only the product token, case-insensitively.
        name = useragent.split("/")[0].lower()
        for agent in self.useragents:
            # '*' is the catch-all agent; otherwise a case-insensitive
            # substring match counts.
            if agent == '*' or agent.lower() in name:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        # First matching rule wins; no match means allowed.
        for rule in self.rulelines:
            if rule.applies_to(filename):
                return rule.allowance
        return True