Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 1 | """ robotparser.py |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 2 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 3 | Copyright (C) 2000 Bastian Kleineidam |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 4 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 5 | You can choose between two licenses when using this package: |
| 6 | 1) GNU GPLv2 |
Martin v. Löwis | d22368f | 2002-03-18 10:41:20 +0000 | [diff] [blame] | 7 | 2) PSF license for Python 2.2 |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 8 | |
| 9 | The robots.txt Exclusion Protocol is implemented as specified in |
Raymond Hettinger | a5413c4 | 2014-05-12 22:18:50 -0700 | [diff] [blame] | 10 | http://www.robotstxt.org/norobots-rfc.txt |
| 11 | |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 12 | """ |
Skip Montanaro | b8bdbc0 | 2008-04-28 03:27:53 +0000 | [diff] [blame] | 13 | import urlparse |
| 14 | import urllib |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 15 | |
Skip Montanaro | e99d5ea | 2001-01-20 19:54:20 +0000 | [diff] [blame] | 16 | __all__ = ["RobotFileParser"] |
| 17 | |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 18 | |
class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        # Non-wildcard entries, in the order they were parsed.
        self.entries = []
        # Entry whose user-agent list contains "*"; consulted last in can_fetch().
        self.default_entry = None
        # Set when the server answered 401/403: treat every URL as forbidden.
        self.disallow_all = False
        # Set on other 4xx responses (no robots.txt): every URL is allowed.
        self.allow_all = False
        self.set_url(url)
        # Timestamp of the last fetch; 0 means "never fetched yet".
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        # Keep the netloc and path components for later reference.
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        # URLopener (below) records the HTTP status in .errcode instead of
        # raising, so we can branch on it here.
        opener = URLopener()
        f = opener.open(self.url)
        lines = [line.strip() for line in f]
        f.close()
        self.errcode = opener.errcode
        if self.errcode in (401, 403):
            # Access to robots.txt itself is restricted: disallow everything.
            self.disallow_all = True
        elif self.errcode >= 400 and self.errcode < 500:
            # robots.txt absent (e.g. 404): everything is allowed.
            self.allow_all = True
        elif self.errcode == 200 and lines:
            self.parse(lines)

    def _add_entry(self, entry):
        # Route a finished entry to either the default slot or the list.
        if "*" in entry.useragents:
            # the default entry is considered last
            if self.default_entry is None:
                # the first default entry wins
                self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """parse the input lines from a robots.txt file.
        We allow that a user-agent: line is not preceded by
        one or more blank lines."""
        # states:
        #   0: start state
        #   1: saw user-agent line
        #   2: saw an allow or disallow line
        state = 0
        linenumber = 0
        entry = Entry()

        # Record the fetch time so mtime()/can_fetch() know we have data.
        self.modified()
        for line in lines:
            linenumber += 1
            if not line:
                # A blank line terminates the current record.
                if state == 1:
                    # user-agent line(s) with no rules: discard the entry.
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                # Field names are case-insensitive; values are URL-decoded.
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        # A user-agent line after rules starts a new record.
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    # Rule lines before any user-agent line are ignored.
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
        if state == 2:
            # Flush the last entry if the file did not end with a blank line.
            self._add_entry(entry)


    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True

        # Until the robots.txt file has been read or found not
        # to exist, we must assume that no url is allowable.
        # This prevents false positives when a user erroneously
        # calls can_fetch() before calling read().
        if not self.last_checked:
            return False

        # search for given user agent matches
        # the first match counts
        # Normalize: strip scheme/netloc, then re-quote so the path compares
        # against the quoted RuleLine paths.
        parsed_url = urlparse.urlparse(urllib.unquote(url))
        url = urlparse.urlunparse(('', '', parsed_url.path,
            parsed_url.params, parsed_url.query, parsed_url.fragment))
        url = urllib.quote(url)
        if not url:
            url = "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True


    def __str__(self):
        return ''.join([str(entry) + "\n" for entry in self.entries])
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 164 | |
| 165 | |
class RuleLine:
    """A single "Allow:" (allowance==True) or "Disallow:" (allowance==False)
    directive together with the path prefix it governs."""
    def __init__(self, path, allowance):
        # A "Disallow:" with an empty value conventionally means "allow all".
        if not allowance and path == '':
            allowance = True
        # Round-trip through urlparse to normalize, then quote for comparison
        # against the quoted URLs handed to applies_to().
        normalized = urlparse.urlunparse(urlparse.urlparse(path))
        self.path = urllib.quote(normalized)
        self.allowance = allowance

    def applies_to(self, filename):
        """Return true if this rule covers the given (quoted) path."""
        if self.path == "*":
            # wildcard rule matches every path
            return True
        return filename.startswith(self.path)

    def __str__(self):
        if self.allowance:
            label = "Allow"
        else:
            label = "Disallow"
        return label + ": " + self.path
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 182 | |
| 183 | |
class Entry:
    """One robots.txt record: one or more user-agents plus their rule lines."""
    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        pieces = []
        for agent in self.useragents:
            pieces.append("User-agent: " + agent + "\n")
        for rule in self.rulelines:
            pieces.append(str(rule) + "\n")
        return ''.join(pieces)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # Compare only the name token (before any "/version"), case-insensitively.
        name = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # the catch-all agent matches everyone
                return True
            if agent.lower() in name:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        # The first rule that covers the path decides; no matching rule
        # means access is allowed.
        for rule in self.rulelines:
            if rule.applies_to(filename):
                return rule.allowance
        return True
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 219 | |
Skip Montanaro | 5bba231 | 2001-02-12 20:58:30 +0000 | [diff] [blame] | 220 | class URLopener(urllib.FancyURLopener): |
| 221 | def __init__(self, *args): |
Guido van Rossum | 68468eb | 2003-02-27 20:14:51 +0000 | [diff] [blame] | 222 | urllib.FancyURLopener.__init__(self, *args) |
Skip Montanaro | 5bba231 | 2001-02-12 20:58:30 +0000 | [diff] [blame] | 223 | self.errcode = 200 |
Tim Peters | 0e6d213 | 2001-02-15 23:56:39 +0000 | [diff] [blame] | 224 | |
Skip Montanaro | 1a41313 | 2007-08-28 23:22:52 +0000 | [diff] [blame] | 225 | def prompt_user_passwd(self, host, realm): |
| 226 | ## If robots.txt file is accessible only with a password, |
| 227 | ## we act as if the file wasn't there. |
| 228 | return None, None |
| 229 | |
Skip Montanaro | 5bba231 | 2001-02-12 20:58:30 +0000 | [diff] [blame] | 230 | def http_error_default(self, url, fp, errcode, errmsg, headers): |
| 231 | self.errcode = errcode |
| 232 | return urllib.FancyURLopener.http_error_default(self, url, fp, errcode, |
| 233 | errmsg, headers) |