merge heads
diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py
index e450319..04e7c44 100644
--- a/Lib/multiprocessing/pool.py
+++ b/Lib/multiprocessing/pool.py
@@ -321,7 +321,11 @@
@staticmethod
def _handle_workers(pool):
- while pool._worker_handler._state == RUN and pool._state == RUN:
+ thread = threading.current_thread()
+
+ # Keep maintaining workers until the cache gets drained, unless the pool
+ # is terminated.
+ while thread._state == RUN or (pool._cache and thread._state != TERMINATE):
pool._maintain_pool()
time.sleep(0.1)
# send sentinel to stop workers
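
The new predicate lets the worker handler thread outlive a close() call for as long as results are still outstanding; only terminate() stops it immediately. A minimal sketch of that condition, using stand-in values for the module-level RUN/CLOSE/TERMINATE state flags:

    RUN, CLOSE, TERMINATE = 0, 1, 2   # stand-ins for multiprocessing.pool's state flags

    def keep_maintaining(thread_state, cache_nonempty):
        # Mirrors: thread._state == RUN or (pool._cache and thread._state != TERMINATE)
        return thread_state == RUN or (cache_nonempty and thread_state != TERMINATE)

    assert keep_maintaining(RUN, False)            # running pool: keep maintaining workers
    assert keep_maintaining(CLOSE, True)           # closed but tasks pending: keep going (the fix)
    assert not keep_maintaining(CLOSE, False)      # closed and cache drained: exit, send sentinels
    assert not keep_maintaining(TERMINATE, True)   # terminated: exit regardless of pending tasks
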
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py
index 48a4ff4..0bc056f 100644
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -1266,6 +1266,20 @@
p.close()
p.join()
+ def test_pool_worker_lifetime_early_close(self):
+ # Issue #10332: closing a pool whose workers have limited lifetimes
+ # before all the tasks have completed would make join() hang.
+ p = multiprocessing.Pool(3, maxtasksperchild=1)
+ results = []
+ for i in range(6):
+ results.append(p.apply_async(sqr, (i, 0.3)))
+ p.close()
+ p.join()
+ # check the results
+ for (j, res) in enumerate(results):
+ self.assertEqual(res.get(), sqr(j))
+
+
#
# Test that manager has expected number of shared objects left
#
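
The scenario the new test covers can also be reproduced outside the test suite. A standalone sketch (slow_sqr here is a hypothetical stand-in for the suite's own sqr helper): with maxtasksperchild workers retiring after each task, close() followed by join() used to hang while tasks were still queued.

    import multiprocessing
    import time

    def slow_sqr(x, wait=0.3):
        # Hypothetical stand-in for the test suite's sqr(x, wait) helper.
        time.sleep(wait)
        return x * x

    if __name__ == '__main__':
        p = multiprocessing.Pool(3, maxtasksperchild=1)
        results = [p.apply_async(slow_sqr, (i,)) for i in range(6)]
        p.close()
        p.join()   # hung here before the pool.py change above
        print([r.get() for r in results])   # [0, 1, 4, 9, 16, 25]
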
diff --git a/Misc/NEWS b/Misc/NEWS
index ebc72b4..64eff6a 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -10,6 +10,9 @@
Core and Builtins
-----------------
+- Issue #13018: Fix reference leaks in error paths in dictobject.c.
+ Patch by Suman Saha.
+
- Issue #13201: Define '==' and '!=' to compare range objects based on
the sequence of values they define (instead of comparing based on
object identity).
@@ -338,6 +341,9 @@
Library
-------
+- Issue #10332: multiprocessing: fix a race condition when a Pool is closed
+ before all tasks have completed.
+
- Issue #13255: wrong docstrings in array module.
- Issue #8540: Remove deprecated Context._clamp attribute in Decimal module.
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 865feff..82735e6 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -1314,14 +1314,18 @@
PyObject *key;
Py_hash_t hash;
- if (dictresize(mp, Py_SIZE(seq)))
+ if (dictresize(mp, Py_SIZE(seq))) {
+ Py_DECREF(d);
return NULL;
+ }
while (_PyDict_Next(seq, &pos, &key, &oldvalue, &hash)) {
Py_INCREF(key);
Py_INCREF(value);
- if (insertdict(mp, key, hash, value))
+ if (insertdict(mp, key, hash, value)) {
+ Py_DECREF(d);
return NULL;
+ }
}
return d;
}
@@ -1332,14 +1336,18 @@
PyObject *key;
Py_hash_t hash;
- if (dictresize(mp, PySet_GET_SIZE(seq)))
+ if (dictresize(mp, PySet_GET_SIZE(seq))) {
+ Py_DECREF(d);
return NULL;
+ }
while (_PySet_NextEntry(seq, &pos, &key, &hash)) {
Py_INCREF(key);
Py_INCREF(value);
- if (insertdict(mp, key, hash, value))
+ if (insertdict(mp, key, hash, value)) {
+ Py_DECREF(d);
return NULL;
+ }
}
return d;
}
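
Both hunks appear to sit in the dict.fromkeys() fast paths taken when the argument is exactly a dict or a set, with d being the freshly created result dict that leaked whenever dictresize() or insertdict() failed part-way. A quick illustration of the calls that exercise those paths (observable behaviour is unchanged, only the error-path refcounting is):

    # Fast path for an exact dict argument (the _PyDict_Next loop above).
    assert dict.fromkeys({'a': 1, 'b': 2}, 0) == {'a': 0, 'b': 0}
    # Fast path for an exact set argument (the _PySet_NextEntry loop above).
    assert dict.fromkeys({'x', 'y'}, None) == {'x': None, 'y': None}
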
diff --git a/Python/import.c b/Python/import.c
index e21ae39..3842c63 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -938,12 +938,19 @@
Py_ssize_t pycache_len = sizeof(CACHEDIR) - 1;
int kind;
void *data;
+ Py_UCS4 lastsep;
/* Compute the output string size. */
len = PyUnicode_GET_LENGTH(pathstr);
/* If there is no separator, this returns -1, so
- lastsep will be 0. */
+ fname will be 0. */
fname = rightmost_sep_obj(pathstr, 0, len) + 1;
+ /* Windows: re-use the last separator character (/ or \\) when
+ appending the __pycache__ path. */
+ if (fname > 0)
+ lastsep = PyUnicode_READ_CHAR(pathstr, fname - 1);
+ else
+ lastsep = SEP;
ext = fname - 1;
for(i = fname; i < len; i++)
if (PyUnicode_READ_CHAR(pathstr, i) == '.')
@@ -965,7 +972,7 @@
pos = fname;
for (i = 0; i < pycache_len; i++)
PyUnicode_WRITE(kind, data, pos++, CACHEDIR[i]);
- PyUnicode_WRITE(kind, data, pos++, SEP);
+ PyUnicode_WRITE(kind, data, pos++, lastsep);
PyUnicode_CopyCharacters(result, pos, pathstr,
fname, ext - fname);
pos += ext - fname;
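
On Windows this makes the separator appended after __pycache__ mirror whichever separator the source path already uses, instead of always emitting the platform SEP. A hedged illustration, assuming this hunk is in the C code behind imp.cache_from_source() (the cpython-XX tag in the result depends on the build):

    import imp

    # On a Windows build (assumption: imp.cache_from_source() reaches this code path):
    imp.cache_from_source(r'C:\pkg\mod.py')   # -> r'C:\pkg\__pycache__\mod.cpython-XX.pyc'
    imp.cache_from_source('C:/pkg/mod.py')    # -> 'C:/pkg/__pycache__/mod.cpython-XX.pyc'
                                              #    (no longer a mixed '...__pycache__\mod...')
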