Fix FileIO.readall() (new_buffersize()) for large files
Truncate the buffer size to PY_SSIZE_T_MAX: on builds where Py_off_t is wider than Py_ssize_t, the size computed from the file offsets can exceed the largest buffer readall() is allowed to allocate.
diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c
index dc59455..ba5e096 100644
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -564,7 +564,11 @@
         */
         if (end >= SMALLCHUNK && end >= pos && pos >= 0) {
             /* Add 1 so if the file were to grow we'd notice. */
-            return currentsize + end - pos + 1;
+            Py_off_t bufsize = currentsize + end - pos + 1;
+            if (bufsize < PY_SSIZE_T_MAX)
+                return (size_t)bufsize;
+            else
+                return PY_SSIZE_T_MAX;
         }
     }
 #endif
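
For illustration only, the following is a minimal standalone sketch (not the CPython sources) of the clamping technique used in the hunk above. The names off64, SSIZE_MAX_EMULATED, and estimate_buffersize are stand-ins invented here for Py_off_t, PY_SSIZE_T_MAX, and new_buffersize(); the limit is emulated as a 32-bit Py_ssize_t so the truncation is visible on any platform.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/* Stand-ins for CPython's types and macros (assumptions for this sketch). */
typedef int64_t off64;                          /* plays the role of Py_off_t   */
#define SSIZE_MAX_EMULATED ((int64_t)INT32_MAX) /* pretend 32-bit PY_SSIZE_T_MAX */

/* Estimate the buffer needed to read the rest of the file in one call,
 * clamped so the result always fits in the emulated Py_ssize_t range. */
static size_t
estimate_buffersize(size_t currentsize, off64 pos, off64 end)
{
    /* Add 1 so that a file that grows while being read is noticed. */
    off64 bufsize = (off64)currentsize + end - pos + 1;
    if (bufsize < SSIZE_MAX_EMULATED)
        return (size_t)bufsize;
    else
        return (size_t)SSIZE_MAX_EMULATED;
}

int main(void)
{
    /* A file far larger than the emulated 32-bit limit. */
    off64 end = (off64)6 * 1024 * 1024 * 1024;  /* 6 GiB */
    size_t n = estimate_buffersize(0, 0, end);
    printf("clamped buffer size: %zu\n", n);    /* prints 2147483647 */
    return 0;
}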