Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 1 | """ robotparser.py |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 2 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 3 | Copyright (C) 2000 Bastian Kleineidam |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 4 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 5 | You can choose between two licenses when using this package: |
| 6 | 1) GNU GPLv2 |
Martin v. Löwis | d22368f | 2002-03-18 10:41:20 +0000 | [diff] [blame] | 7 | 2) PSF license for Python 2.2 |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 8 | |
| 9 | The robots.txt Exclusion Protocol is implemented as specified in |
| 10 | http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 11 | """ |
Neal Norwitz | 5aee504 | 2002-05-31 14:14:06 +0000 | [diff] [blame] | 12 | import urlparse,urllib |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 13 | |
Skip Montanaro | e99d5ea | 2001-01-20 19:54:20 +0000 | [diff] [blame] | 14 | __all__ = ["RobotFileParser"] |
| 15 | |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 16 | debug = 0 |
| 17 | |
| 18 | def _debug(msg): |
Guido van Rossum | be19ed7 | 2007-02-09 05:37:30 +0000 | [diff] [blame] | 19 | if debug: print(msg) |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 20 | |
Guido van Rossum | bbf8c2f | 1997-01-30 03:18:23 +0000 | [diff] [blame] | 21 | |
class RobotFileParser:
    """Read, parse and answer questions about a single robots.txt file.

    Typical use: set_url(), read(), then can_fetch(useragent, url).
    """

    def __init__(self, url=''):
        self.entries = []            # Entry objects for specific user-agents
        self.default_entry = None    # Entry for the catch-all "*" agent
        self.disallow_all = False    # True when fetching robots.txt was forbidden
        self.allow_all = False       # True when robots.txt does not exist
        self.set_url(url)
        self.last_checked = 0        # time of last fetch (0 = never fetched)

    def mtime(self):
        """Return the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.
        """
        return self.last_checked

    def modified(self):
        """Set the time the robots.txt file was last fetched to the
        current time.
        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Set the URL referring to a robots.txt file."""
        self.url = url
        # Keep the network location and path around for callers.
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Read the robots.txt URL and feed it to the parser."""
        opener = URLopener()
        f = opener.open(self.url)
        lines = []
        line = f.readline()
        while line:
            lines.append(line.strip())
            line = f.readline()
        self.errcode = opener.errcode
        if self.errcode == 401 or self.errcode == 403:
            # Access to robots.txt itself is restricted: assume every
            # URL on the site is off-limits.
            self.disallow_all = True
            _debug("disallow all")
        elif self.errcode >= 400:
            # robots.txt missing (or other error): no restrictions apply.
            self.allow_all = True
            _debug("allow all")
        elif self.errcode == 200 and lines:
            _debug("parse lines")
            self.parse(lines)

    def _add_entry(self, entry):
        """File *entry* as the default ("*") entry or a specific one."""
        if "*" in entry.useragents:
            # the default entry is considered last
            self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """Parse the input lines from a robots.txt file.

        We allow that a user-agent: line is not preceded by
        one or more blank lines.
        """
        # state: 0 = expecting a user-agent line, 1 = inside user-agent
        # line(s), 2 = saw at least one rule line for the current entry.
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            linenumber = linenumber + 1
            if not line:
                if state == 1:
                    _debug("line %d: warning: you should insert"
                           " allow: or disallow: directives below any"
                           " user-agent: line" % linenumber)
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        _debug("line %d: warning: you should insert a blank"
                               " line before any user-agent"
                               " directive" % linenumber)
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state == 0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state == 0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], True))
                        # BUGFIX: mark the entry complete, exactly as the
                        # disallow branch does; otherwise an entry holding
                        # only Allow: lines is dropped at the next blank
                        # line or at end of file.
                        state = 2
                else:
                    _debug("line %d: warning: unknown key %s" % (linenumber,
                               line[0]))
            else:
                _debug("line %d: error: malformed line %s" % (linenumber, line))
        if state == 2:
            # BUGFIX: route the final entry through _add_entry() so a
            # trailing "User-agent: *" record becomes the default entry
            # (consulted last) instead of being appended to self.entries,
            # where it would shadow more specific entries in can_fetch().
            self._add_entry(entry)
        _debug("Parsed rules:\n%s" % str(self))

    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        _debug("Checking robots.txt allowance for:\n  user agent: %s\n  url: %s" %
               (useragent, url))
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
        url = urllib.quote(urlparse.urlparse(urllib.unquote(url))[2]) or "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def __str__(self):
        ret = ""
        for entry in self.entries:
            ret = ret + str(entry) + "\n"
        return ret
| 175 | |
| 176 | |
class RuleLine:
    """One robots.txt directive: an "Allow:" (allowance == True) or
    "Disallow:" (allowance == False) keyword paired with a path prefix."""

    def __init__(self, path, allowance):
        # A "Disallow:" with an empty value means "allow everything".
        if path == '' and not allowance:
            allowance = True
        self.path = urllib.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        """Return whether this rule covers *filename* ("*" covers all)."""
        if self.path == "*":
            return True
        return filename.startswith(self.path)

    def __str__(self):
        keyword = "Allow" if self.allowance else "Disallow"
        return keyword + ": " + self.path
| 192 | |
| 193 | |
class Entry:
    """One robots.txt record: one or more user-agents plus zero or more
    rule lines that apply to them."""

    def __init__(self):
        self.useragents = []   # agent name tokens this entry applies to
        self.rulelines = []    # RuleLine objects, in file order

    def __str__(self):
        parts = ["User-agent: " + agent + "\n" for agent in self.useragents]
        parts.extend(str(rule) + "\n" for rule in self.rulelines)
        return "".join(parts)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # Compare only the name token (before any "/version"), lower-cased.
        name = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # the catch-all agent matches everybody
                return True
            if agent.lower() in name:
                # substring match, per the original matching rules
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        for rule in self.rulelines:
            _debug((filename, str(rule), rule.allowance))
            if rule.applies_to(filename):
                # the first matching rule wins
                return rule.allowance
        # no rule matched: access is allowed by default
        return True
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 230 | |
class URLopener(urllib.FancyURLopener):
    """FancyURLopener that remembers the last HTTP error status code.

    errcode stays 200 unless an HTTP error handler fires, so
    RobotFileParser.read() can distinguish 401/403, other >= 400
    errors, and a successful fetch.
    """

    def __init__(self, *args):
        urllib.FancyURLopener.__init__(self, *args)
        self.errcode = 200

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        # Record the status code for later inspection, then defer to the
        # normal default error handling.
        self.errcode = errcode
        return urllib.FancyURLopener.http_error_default(
            self, url, fp, errcode, errmsg, headers)
| 240 | |
Skip Montanaro | 5bba231 | 2001-02-12 20:58:30 +0000 | [diff] [blame] | 241 | def _check(a,b): |
| 242 | if not b: |
| 243 | ac = "access denied" |
| 244 | else: |
| 245 | ac = "access allowed" |
| 246 | if a!=b: |
Guido van Rossum | be19ed7 | 2007-02-09 05:37:30 +0000 | [diff] [blame] | 247 | print("failed") |
Skip Montanaro | 5bba231 | 2001-02-12 20:58:30 +0000 | [diff] [blame] | 248 | else: |
Guido van Rossum | be19ed7 | 2007-02-09 05:37:30 +0000 | [diff] [blame] | 249 | print("ok (%s)" % ac) |
| 250 | print() |
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 251 | |
def _test():
    """Self-test against live web sites (requires network access)."""
    global debug
    rp = RobotFileParser()
    debug = 1

    # robots.txt that exists, gotten to by redirection
    rp.set_url('http://www.musi-cal.com/robots.txt')
    rp.read()

    # test for re.escape
    _check(rp.can_fetch('*', 'http://www.musi-cal.com/'), 1)
    # this should match the first rule, which is a disallow
    _check(rp.can_fetch('', 'http://www.musi-cal.com/'), 0)
    # various cherry pickers
    search_url = ('http://www.musi-cal.com/cgi-bin/event-search'
                  '?city=San+Francisco')
    for picker in ('CherryPickerSE', 'CherryPickerSE/1.0',
                   'CherryPickerSE/1.5'):
        _check(rp.can_fetch(picker, search_url), 0)
    # case sensitivity
    for agent in ('ExtractorPro', 'extractorpro'):
        _check(rp.can_fetch(agent, 'http://www.musi-cal.com/blubba'), 0)
    # substring test
    _check(rp.can_fetch('toolpak/1.1', 'http://www.musi-cal.com/blubba'), 0)
    # tests for catch-all * agent
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/search'), 0)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/Musician/me'), 1)
    for _ in range(2):
        _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)

    # robots.txt that does not exist
    rp.set_url('http://www.lycos.com/robots.txt')
    rp.read()
    _check(rp.can_fetch('Mozilla', 'http://www.lycos.com/search'), 1)
Skip Montanaro | 663f6c2 | 2001-01-20 15:59:25 +0000 | [diff] [blame] | 290 | |
if __name__ == '__main__':
    # Run the self-test (hits live web sites) when executed as a script.
    _test()