Merged revisions 84597-84599 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/py3k
........
r84597 | antoine.pitrou | 2010-09-07 22:42:19 +0200 (Tue, 07 Sep 2010) | 5 lines
Issue #8574: better implementation of test.support.transient_internet().
Original patch by Victor.
........
r84598 | antoine.pitrou | 2010-09-07 23:05:49 +0200 (Tue, 07 Sep 2010) | 6 lines
Issue #9792: In case of connection failure, socket.create_connection()
would swallow the exception and raise a new one, making it impossible
to fetch the original errno, or to filter timeout errors. Now the
original error is re-raised.
........
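
For context, the r84598 fix follows the usual pattern of remembering the last
exception raised while iterating over getaddrinfo() results and re-raising it
unchanged once every candidate address has failed. A minimal sketch of that
pattern (a simplified stand-in, not the actual Lib/socket.py source):

    import socket

    def create_connection(address, timeout=None):
        # Try each address returned by getaddrinfo(); keep the original
        # exception so callers can still inspect errno or catch
        # socket.timeout specifically.
        host, port = address
        err = None
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            sock = None
            try:
                sock = socket.socket(af, socktype, proto)
                if timeout is not None:
                    sock.settimeout(timeout)
                sock.connect(sa)
                return sock
            except socket.error as e:
                err = e          # remember the original error...
                if sock is not None:
                    sock.close()
        if err is not None:
            raise err            # ...and re-raise it, errno and type intact
        raise socket.error("getaddrinfo returns an empty list")

With the original exception re-raised, a caller can now catch socket.timeout
or inspect err.errno reliably, which is exactly what transient_internet()
needs in order to filter transient failures.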
r84599 | antoine.pitrou | 2010-09-07 23:09:09 +0200 (Tue, 07 Sep 2010) | 4 lines
Improve transient_internet() again to detect more network errors,
and use it in test_robotparser. Fixes #8574.
........
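
In outline, the transient_internet() helper reworked in r84597 and r84599 is a
context manager that installs a default socket timeout and converts well-known
transient network errors into a skipped test instead of a failure. A
simplified sketch of the approach (the real test.support helper recognizes a
longer errno list and raises ResourceDenied; unittest.SkipTest is used here
only to keep the sketch self-contained):

    import contextlib
    import errno
    import socket
    import unittest

    @contextlib.contextmanager
    def transient_internet(resource_name, timeout=30.0):
        # Errnos treated as transient; the real helper checks more of them.
        transient_errnos = frozenset(
            getattr(errno, name) for name in
            ('ECONNREFUSED', 'ECONNRESET', 'EHOSTUNREACH',
             'ENETUNREACH', 'ETIMEDOUT')
            if hasattr(errno, name))
        old_timeout = socket.getdefaulttimeout()
        try:
            socket.setdefaulttimeout(timeout)
            yield
        except IOError as err:
            # socket.error and socket.timeout are IOError subclasses here.
            if (isinstance(err, socket.timeout) or
                    getattr(err, 'errno', None) in transient_errnos):
                raise unittest.SkipTest('%s is unreachable' % resource_name)
            raise
        finally:
            socket.setdefaulttimeout(old_timeout)

The diff below wraps each network-touching test body in this context manager.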
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index aa73ec5..3376a8a 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -232,23 +232,24 @@
     def testPasswordProtectedSite(self):
         test_support.requires('network')
-        # XXX it depends on an external resource which could be unavailable
-        url = 'http://mueblesmoraleda.com'
-        parser = robotparser.RobotFileParser()
-        parser.set_url(url)
-        try:
-            parser.read()
-        except IOError:
-            self.skipTest('%s is unavailable' % url)
-        self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
+        with test_support.transient_internet('mueblesmoraleda.com'):
+            url = 'http://mueblesmoraleda.com'
+            parser = robotparser.RobotFileParser()
+            parser.set_url(url)
+            try:
+                parser.read()
+            except IOError:
+                self.skipTest('%s is unavailable' % url)
+            self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
 
     def testPythonOrg(self):
         test_support.requires('network')
-        parser = robotparser.RobotFileParser(
-            "http://www.python.org/robots.txt")
-        parser.read()
-        self.assertTrue(parser.can_fetch("*",
-            "http://www.python.org/robots.txt"))
+        with test_support.transient_internet('www.python.org'):
+            parser = robotparser.RobotFileParser(
+                "http://www.python.org/robots.txt")
+            parser.read()
+            self.assertTrue(
+                parser.can_fetch("*", "http://www.python.org/robots.txt"))
 
 def test_main():