bpo-30296 Remove unnecessary tuples, lists, sets, and dicts (#1489)

* Replaced list(<generator expression>) with list comprehension
* Replaced dict(<generator expression>) with dict comprehension
* Replaced set(<list literal>) with set literal
* Replaced built-in func(<list comprehension>) with func(<generator
  expression>) when supported (e.g. any(), all(), tuple(), min(), and
  max())
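
A minimal sketch of each rewrite pattern (illustrative only; `items` and
`pred` are hypothetical names, not identifiers taken from the patch):

    items = range(10)
    pred = lambda x: x % 2 == 0

    squares  = [x * x for x in items]          # was: list(x * x for x in items)
    lookup   = {x: x * x for x in items}       # was: dict((x, x * x) for x in items)
    vowels   = {'a', 'e', 'i', 'o', 'u'}       # was: set(['a', 'e', 'i', 'o', 'u'])
    has_even = any(pred(x) for x in items)     # was: any([pred(x) for x in items])

Each pair is semantically equivalent; the rewritten form skips building
an intermediate list (or an extra generator), and short-circuiting
builtins such as any()/all() can stop consuming the generator early.
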
diff --git a/Lib/_weakrefset.py b/Lib/_weakrefset.py
index 4d0de8c..304c66f 100644
--- a/Lib/_weakrefset.py
+++ b/Lib/_weakrefset.py
@@ -157,19 +157,19 @@
     __le__ = issubset
 
     def __lt__(self, other):
-        return self.data < set(ref(item) for item in other)
+        return self.data < set(map(ref, other))
 
     def issuperset(self, other):
         return self.data.issuperset(ref(item) for item in other)
     __ge__ = issuperset
 
     def __gt__(self, other):
-        return self.data > set(ref(item) for item in other)
+        return self.data > set(map(ref, other))
 
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
             return NotImplemented
-        return self.data == set(ref(item) for item in other)
+        return self.data == set(map(ref, other))
 
     def symmetric_difference(self, other):
         newset = self.copy()
diff --git a/Lib/distutils/msvc9compiler.py b/Lib/distutils/msvc9compiler.py
index 2119127..c401ddc 100644
--- a/Lib/distutils/msvc9compiler.py
+++ b/Lib/distutils/msvc9compiler.py
@@ -255,7 +255,7 @@
     """Launch vcvarsall.bat and read the settings from its environment
     """
     vcvarsall = find_vcvarsall(version)
-    interesting = set(("include", "lib", "libpath", "path"))
+    interesting = {"include", "lib", "libpath", "path"}
     result = {}
 
     if vcvarsall is None:
diff --git a/Lib/email/headerregistry.py b/Lib/email/headerregistry.py
index 0fc2231..81fee14 100644
--- a/Lib/email/headerregistry.py
+++ b/Lib/email/headerregistry.py
@@ -369,8 +369,8 @@
     @property
     def addresses(self):
         if self._addresses is None:
-            self._addresses = tuple([address for group in self._groups
-                                             for address in group.addresses])
+            self._addresses = tuple(address for group in self._groups
+                                            for address in group.addresses)
         return self._addresses
 
 
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 3317f58..9c072eb 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -389,7 +389,7 @@
 
     mro = getmro(cls)
     metamro = getmro(type(cls)) # for attributes stored in the metaclass
-    metamro = tuple([cls for cls in metamro if cls not in (type, object)])
+    metamro = tuple(cls for cls in metamro if cls not in (type, object))
     class_bases = (cls,) + mro
     all_bases = class_bases + metamro
     names = dir(cls)
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 917178e..d692514 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -463,7 +463,7 @@
             c = self.resolve(c)
         props = config.pop('.', None)
         # Check for valid identifiers
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         result = c(**kwargs)
         if props:
             for name, value in props.items():
@@ -726,7 +726,7 @@
                 config['address'] = self.as_tuple(config['address'])
             factory = klass
         props = config.pop('.', None)
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         try:
             result = factory(**kwargs)
         except TypeError as te:
diff --git a/Lib/multiprocessing/context.py b/Lib/multiprocessing/context.py
index a3d491b..c98ee43 100644
--- a/Lib/multiprocessing/context.py
+++ b/Lib/multiprocessing/context.py
@@ -261,7 +261,7 @@
             else:
                 return ['fork', 'spawn']
 
-DefaultContext.__all__ = list(x for x in dir(DefaultContext) if x[0] != '_')
+DefaultContext.__all__ = [x for x in dir(DefaultContext) if x[0] != '_']
 
 #
 # Context types for fixed start method
diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py
index d5ce625..6e09539 100644
--- a/Lib/multiprocessing/forkserver.py
+++ b/Lib/multiprocessing/forkserver.py
@@ -98,8 +98,7 @@
             if self._preload_modules:
                 desired_keys = {'main_path', 'sys_path'}
                 data = spawn.get_preparation_data('ignore')
-                data = dict((x,y) for (x,y) in data.items()
-                            if x in desired_keys)
+                data = {x: y for x, y in data.items() if x in desired_keys}
             else:
                 data = {}
 
diff --git a/Lib/multiprocessing/sharedctypes.py b/Lib/multiprocessing/sharedctypes.py
index 25cbcf2..7228751 100644
--- a/Lib/multiprocessing/sharedctypes.py
+++ b/Lib/multiprocessing/sharedctypes.py
@@ -115,7 +115,7 @@
             scls = class_cache[cls]
         except KeyError:
             names = [field[0] for field in cls._fields_]
-            d = dict((name, make_property(name)) for name in names)
+            d = {name: make_property(name) for name in names}
             classname = 'Synchronized' + cls.__name__
             scls = class_cache[cls] = type(classname, (SynchronizedBase,), d)
         return scls(obj, lock, ctx)
diff --git a/Lib/pathlib.py b/Lib/pathlib.py
index 4368eba..4d89436 100644
--- a/Lib/pathlib.py
+++ b/Lib/pathlib.py
@@ -114,10 +114,7 @@
 
     is_supported = (os.name == 'nt')
 
-    drive_letters = (
-        set(chr(x) for x in range(ord('a'), ord('z') + 1)) |
-        set(chr(x) for x in range(ord('A'), ord('Z') + 1))
-    )
+    drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
     ext_namespace_prefix = '\\\\?\\'
 
     reserved_names = (
diff --git a/Lib/pstats.py b/Lib/pstats.py
index b8bcfb2..b7a2054 100644
--- a/Lib/pstats.py
+++ b/Lib/pstats.py
@@ -500,8 +500,7 @@
         if func in new_callers:
             if isinstance(caller, tuple):
                 # format used by cProfile
-                new_callers[func] = tuple([i[0] + i[1] for i in
-                                           zip(caller, new_callers[func])])
+                new_callers[func] = tuple(i[0] + i[1] for i in zip(caller, new_callers[func]))
             else:
                 # format used by profile
                 new_callers[func] += caller
diff --git a/Lib/symtable.py b/Lib/symtable.py
index b0e5260..c7627a6 100644
--- a/Lib/symtable.py
+++ b/Lib/symtable.py
@@ -119,8 +119,8 @@
     __globals = None
 
     def __idents_matching(self, test_func):
-        return tuple([ident for ident in self.get_identifiers()
-                      if test_func(self._table.symbols[ident])])
+        return tuple(ident for ident in self.get_identifiers()
+                     if test_func(self._table.symbols[ident]))
 
     def get_parameters(self):
         if self.__params is None:
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index eea88b7..634662d 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -142,7 +142,7 @@
     #  'rf'). The various permutations will be generated.
     _valid_string_prefixes = ['b', 'r', 'u', 'f', 'br', 'fr']
     # if we add binary f-strings, add: ['fb', 'fbr']
-    result = set([''])
+    result = {''}
     for prefix in _valid_string_prefixes:
         for t in _itertools.permutations(prefix):
             # create a list with upper and lower versions of each
diff --git a/Lib/traceback.py b/Lib/traceback.py
index 09bda71..fb3bce1 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -253,8 +253,7 @@
         self._line = line
         if lookup_line:
             self.line
-        self.locals = \
-            dict((k, repr(v)) for k, v in locals.items()) if locals else None
+        self.locals = {k: repr(v) for k, v in locals.items()} if locals else None
 
     def __eq__(self, other):
         if isinstance(other, FrameSummary):
diff --git a/Lib/turtle.py b/Lib/turtle.py
index 8036b7f..b2623f1 100644
--- a/Lib/turtle.py
+++ b/Lib/turtle.py
@@ -1175,7 +1175,7 @@
             cl = [16*int(cstr[h], 16) for h in cstr[1:]]
         else:
             raise TurtleGraphicsError("bad colorstring: %s" % cstr)
-        return tuple([c * self._colormode/255 for c in cl])
+        return tuple(c * self._colormode/255 for c in cl)
 
     def colormode(self, cmode=None):
         """Return the colormode or set it to 1.0 or 255.
@@ -2989,7 +2989,7 @@
             t11, t12, t21, t22 = l, 0, 0, l
         elif self._resizemode == "noresize":
             return polygon
-        return tuple([(t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon])
+        return tuple((t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon)
 
     def _drawturtle(self):
         """Manages the correct rendering of the turtle with respect to
@@ -3839,8 +3839,8 @@
         docsdict[key] = eval(key).__doc__
 
     with open("%s.py" % filename,"w") as f:
-        keys = sorted([x for x in docsdict.keys()
-                            if x.split('.')[1] not in _alias_list])
+        keys = sorted(x for x in docsdict.keys()
+                      if x.split('.')[1] not in _alias_list)
         f.write('docsdict = {\n\n')
         for key in keys[:-1]:
             f.write('%s :\n' % repr(key))
diff --git a/Lib/turtledemo/wikipedia.py b/Lib/turtledemo/wikipedia.py
index 0f27442..d6bbad8 100644
--- a/Lib/turtledemo/wikipedia.py
+++ b/Lib/turtledemo/wikipedia.py
@@ -52,7 +52,7 @@
     sleep(1)
 
     at = clock()
-    while any([t.undobufferentries() for t in s.turtles()]):
+    while any(t.undobufferentries() for t in s.turtles()):
         for t in s.turtles():
             t.undo()
     et = clock()
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 3f8dcfb..a192d52 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -683,8 +683,8 @@
         newurl = newurl.replace(' ', '%20')
 
         CONTENT_HEADERS = ("content-length", "content-type")
-        newheaders = dict((k, v) for k, v in req.headers.items()
-                          if k.lower() not in CONTENT_HEADERS)
+        newheaders = {k: v for k, v in req.headers.items()
+                      if k.lower() not in CONTENT_HEADERS}
         return Request(newurl,
                        headers=newheaders,
                        origin_req_host=req.origin_req_host,
@@ -845,7 +845,7 @@
             self.passwd[realm] = {}
         for default_port in True, False:
             reduced_uri = tuple(
-                [self.reduce_uri(u, default_port) for u in uri])
+                self.reduce_uri(u, default_port) for u in uri)
             self.passwd[realm][reduced_uri] = (user, passwd)
 
     def find_user_password(self, realm, authuri):
@@ -1286,8 +1286,7 @@
         h.set_debuglevel(self._debuglevel)
 
         headers = dict(req.unredirected_hdrs)
-        headers.update(dict((k, v) for k, v in req.headers.items()
-                            if k not in headers))
+        headers.update((k, v) for k, v in req.headers.items() if k not in headers)
 
         # TODO(jhylton): Should this be redesigned to handle
         # persistent connections?
@@ -1299,7 +1298,7 @@
         # So make sure the connection gets closed after the (only)
         # request.
         headers["Connection"] = "close"
-        headers = dict((name.title(), val) for name, val in headers.items())
+        headers = {name.title(): val for name, val in headers.items()}
 
         if req._tunnel_host:
             tunnel_headers = {}