Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 1 | """ robotparser.py |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 2 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 3 | Copyright (C) 2000 Bastian Kleineidam |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 4 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 5 | You can choose between two licenses when using this package: |
| 6 | 1) GNU GPLv2 |
| 7 | 2) PYTHON 2.0 OPEN SOURCE LICENSE |
| 8 | |
| 9 | The robots.txt Exclusion Protocol is implemented as specified in |
| 10 | http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 11 | """ |
Eric S. Raymond | 141971f | 2001-02-09 08:40:40 +0000 | [diff] [blame] | 12 | import re,urlparse,urllib |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 13 | |
Skip Montanaro | e99d5ea | 2001-01-20 19:54:20 +0000 | [diff] [blame] | 14 | __all__ = ["RobotFileParser"] |
| 15 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 16 | debug = 0 |
| 17 | |
| 18 | def _debug(msg): |
| 19 | if debug: print msg |
| 20 | |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 21 | |
class RobotFileParser:
    """Parse a robots.txt file and answer "may this agent fetch this URL?".

    State:
      entries      -- parsed Entry records, in file order (first match wins)
      disallow_all -- set by read() when robots.txt itself was forbidden
      allow_all    -- set by read() when robots.txt was unavailable
      last_checked -- timestamp of the last modified() call (see mtime)
    """
    def __init__(self, url=''):
        self.entries = []
        self.disallow_all = 0
        self.allow_all = 0
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        # Time robots.txt was last checked (0 = never); callers use this
        # to decide when a re-fetch is due.
        return self.last_checked

    def modified(self):
        # Record "now" as the moment robots.txt was last (re)read.
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        # Remember the robots.txt URL and cache the host and path
        # components that read() needs for the HTTP request.
        self.url = url
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Fetch self.url over HTTP and digest the reply.

        Follows up to 5 redirects (301/302).  401/403 sets disallow_all
        (file exists but we may not see it), any other >=400 status sets
        allow_all (no usable robots.txt), and a success reply is handed
        to parse().
        """
        import httplib
        tries = 0
        while tries<5:
            connection = httplib.HTTP(self.host)
            connection.putrequest("GET", self.path)
            connection.putheader("Host", self.host)
            connection.endheaders()
            status, text, mime = connection.getreply()
            if status in [301,302] and mime:
                tries = tries + 1
                # Redirect target may be relative; resolve it against the
                # current URL.  "Uri" is a legacy spelling of Location.
                newurl = mime.get("Location", mime.get("Uri", ""))
                newurl = urlparse.urljoin(self.url, newurl)
                self.set_url(newurl)
            else:
                break
        if status==401 or status==403:
            # robots.txt exists but access is restricted: be conservative.
            self.disallow_all = 1
        elif status>=400:
            # robots.txt missing or broken: everything is fetchable.
            self.allow_all = 1
        else:
            # status < 400
            self.parse(connection.getfile().readlines())

    def parse(self, lines):
        """Parse the input lines from a robots.txt file.

        State machine: 0 = expecting a user-agent: line, 1 = inside a
        user-agent header block with no rules seen yet, 2 = collecting
        allow/disallow rules.  A blank line terminates an entry.  We
        tolerate a user-agent: line that is not preceded by one or more
        blank lines.
        """
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            line = line.strip()
            linenumber = linenumber + 1
            if not line:
                if state==1:
                    # Header block with no rules: discard it.
                    _debug("line %d: warning: you should insert"
                           " allow: or disallow: directives below any"
                           " user-agent: line" % linenumber)
                    entry = Entry()
                    state = 0
                elif state==2:
                    # Blank line closes the current entry.
                    self.entries.append(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i>=0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = line[1].strip()
                if line[0] == "user-agent":
                    if state==2:
                        _debug("line %d: warning: you should insert a blank"
                               " line before any user-agent"
                               " directive" % linenumber)
                        self.entries.append(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state==0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], 0))
                        state = 2
                elif line[0] == "allow":
                    if state==0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], 1))
                        # Bug fix: record that rules have been seen, as the
                        # disallow branch does.  Without this, an entry whose
                        # last directive is "allow" is dropped at the next
                        # blank line (state==1 path above) or never appended
                        # at end of input.
                        state = 2
                else:
                    _debug("line %d: warning: unknown key %s" % (linenumber,
                           line[0]))
            else:
                _debug("line %d: error: malformed line %s"%(linenumber, line))
        if state==2:
            self.entries.append(entry)
        _debug("Parsed rules:\n%s" % str(self))


    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        _debug("Checking robot.txt allowance for\n%s\n%s" % (useragent, url))
        if self.disallow_all:
            return 0
        if self.allow_all:
            return 1
        # search for given user agent matches
        # the first match counts
        useragent = useragent.lower()
        url = urllib.quote(urlparse.urlparse(url)[2])
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # agent not found ==> access granted
        return 1


    def __str__(self):
        # Reconstruct a robots.txt-style text from the parsed entries.
        ret = ""
        for entry in self.entries:
            ret = ret + str(entry) + "\n"
        return ret
| 153 | |
| 154 | |
class RuleLine:
    """One "Allow:" (allowance==1) or "Disallow:" (allowance==0)
    directive together with the path it governs."""
    def __init__(self, path, allowance):
        # Store the quoted form so matching happens on escaped paths.
        self.path = urllib.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        # "*" is a wildcard matching every path; any other stored path
        # acts as a regular expression anchored at the start of filename.
        if self.path == "*":
            return 1
        return re.match(self.path, filename)

    def __str__(self):
        if self.allowance:
            verb = "Allow"
        else:
            verb = "Disallow"
        return verb + ": " + self.path
| 167 | |
| 168 | |
class Entry:
    """One robots.txt record: one or more user-agents plus zero or more
    rule lines that apply to them."""
    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        # Render the record back in robots.txt syntax: all user-agent
        # lines first, then the rules, one per line.
        pieces = []
        for agent in self.useragents:
            pieces.append("User-agent: " + agent + "\n")
        for rule in self.rulelines:
            pieces.append(str(rule) + "\n")
        return "".join(pieces)

    def applies_to(self, useragent):
        "check if this entry applies to the specified agent"
        for agent in self.useragents:
            # "*" matches any agent; otherwise the agent name is used as
            # a regex against the (caller-lowercased) useragent string.
            if agent == "*" or re.match(agent, useragent):
                return 1
        return 0

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        # First matching rule wins; no rule at all means allowed.
        for rule in self.rulelines:
            if rule.applies_to(filename):
                return rule.allowance
        return 1
| 200 | |
| 201 | |
def _test():
    """Manual smoke test: parse a robots.txt (from a file named on the
    command line, or fetched from musi-cal.com) with debug tracing on,
    then print two can_fetch() answers."""
    global debug
    import sys
    rp = RobotFileParser()
    debug = 1
    if len(sys.argv) > 1:
        # A filename argument means: parse locally, skip the network.
        rp.parse(open(sys.argv[1]).readlines())
    else:
        rp.set_url('http://www.musi-cal.com/robots.txt')
        rp.read()
    print(rp.can_fetch('*', 'http://www.musi-cal.com/'))
    print(rp.can_fetch('Musi-Cal-Robot/1.0',
                       'http://www.musi-cal.com/cgi-bin/event-search'
                       '?city=San+Francisco'))

if __name__ == '__main__':
    _test()