Merged revisions 79534,79537,79539,79558,79606 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

........
  r79534 | florent.xicluna | 2010-03-31 23:21:54 +0200 (Wed, 31 Mar 2010) | 2 lines

  Fix test for xml.etree when using a non-ASCII path, and use check_warnings instead of catch_warnings.
........
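
For context on the catch_warnings/check_warnings change above: test.support.check_warnings
wraps warnings.catch_warnings and additionally verifies that the expected warnings were
actually raised. A minimal sketch of the replacement pattern (the deprecated call is a
hypothetical stand-in, not code from the patch):

    import warnings
    from test import support

    def deprecated_call():
        # Hypothetical stand-in for the API a test exercises.
        warnings.warn("old API", DeprecationWarning)

    # Old pattern: catch_warnings only records or suppresses warnings;
    # it never fails the test if the warning goes missing.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        deprecated_call()

    # New pattern: check_warnings also asserts that a matching
    # DeprecationWarning was actually emitted.
    with support.check_warnings(("old API", DeprecationWarning)):
        deprecated_call()
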
  r79537 | florent.xicluna | 2010-03-31 23:40:32 +0200 (Wed, 31 Mar 2010) | 2 lines

  Fix typo
........
  r79539 | florent.xicluna | 2010-04-01 00:01:03 +0200 (Thu, 01 Apr 2010) | 2 lines

  Replace catch_warnings with check_warnings where it makes sense, and use the assertRaises context manager to simplify some tests.
........
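
The assertRaises context manager mentioned above replaces the older callable-passing
style; a sketch of the simplification (the test bodies are illustrative only):

    import unittest

    class ExampleTest(unittest.TestCase):
        def test_old_style(self):
            # Old style: pass the callable and its arguments in.
            self.assertRaises(ValueError, int, "not a number")

        def test_context_manager(self):
            # Context-manager style: the statement under test reads naturally.
            with self.assertRaises(ValueError):
                int("not a number")

    if __name__ == "__main__":
        unittest.main()
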
  r79558 | florent.xicluna | 2010-04-01 20:17:09 +0200 (Thu, 01 Apr 2010) | 2 lines

  #7092: Fix some -3 warnings, and fix Lib/platform.py when the path contains a double-quote.
........
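
For reference on r79558: the -3 switch makes Python 2 emit DeprecationWarning for
constructs that change or disappear in Python 3, and the Lib/platform.py part of the
fix concerns building a shell command from a path that may itself contain a double
quote. A hypothetical illustration of the quoting hazard (the path is made up and is
not from the patch):

    # Running a test file with Py3k deprecation warnings shown:
    #   python -3 -Wd Lib/test/test_platform.py

    # Naive shell quoting breaks when the path contains a double quote:
    path = 'C:/odd"name/python.exe'   # hypothetical path
    cmd = '"%s" --version' % path
    print(cmd)  # "C:/odd"name/python.exe" --version -- the shell mis-parses this
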
  r79606 | florent.xicluna | 2010-04-02 19:26:42 +0200 (Fri, 02 Apr 2010) | 2 lines

  Backport some robotparser tests and skip the test if the external resource is not available.
........
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 86ebe9f..4c3b536 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -209,21 +209,19 @@
 class NetworkTestCase(unittest.TestCase):
 
     def testPasswordProtectedSite(self):
-        if not support.is_resource_enabled('network'):
-            return
-        # whole site is password-protected.
+        support.requires('network')
+        # XXX it depends on an external resource which could be unavailable
         url = 'http://mueblesmoraleda.com'
         parser = urllib.robotparser.RobotFileParser()
         parser.set_url(url)
         try:
             parser.read()
-            self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
         except URLError:
-            self.skipTest('mueblesmoraleda.com is unavailable')
+            self.skipTest('%s is unavailable' % url)
+        self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
 
     def testPythonOrg(self):
-        if not support.is_resource_enabled('network'):
-            return
+        support.requires('network')
         parser = urllib.robotparser.RobotFileParser(
             "http://www.python.org/robots.txt")
         parser.read()
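
A note on the pattern in the diff above: support.requires('network') raises
ResourceDenied (a subclass of unittest.SkipTest) when the suite runs under regrtest
without -u network, so the test is reported as skipped rather than silently passing
through a bare return. Moving the assertEqual out of the try block also keeps a genuine
assertion failure from being caught and misreported as the site being unavailable; only
a URLError from parser.read() now triggers the skip.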