1"""New I/O library conforming to PEP 3116.
2
3This is a prototype; hopefully eventually some of this will be
4reimplemented in C.
5
6Conformance of alternative implementations: all arguments are intended
7to be positional-only except the arguments of the open() function.
8Argument names except those of the open() function are not part of the
9specification. Instance variables and methods whose name starts with
10a leading underscore are not part of the specification (except "magic"
11names like __iter__). Only the top-level names listed in the __all__
12variable are part of the specification.
13
14XXX edge cases when switching between reading/writing
15XXX need to support 1 meaning line-buffered
16XXX whenever an argument is None, use the default value
17XXX read/write ops should check readable/writable
18XXX buffered readinto should work with arbitrary buffer objects
19XXX use incremental encoder for text output, at least for UTF-16 and UTF-8-SIG
20XXX check writable, readable and seekable in appropriate places
21"""
22
23__author__ = ("Guido van Rossum <guido@python.org>, "
24 "Mike Verdone <mike.verdone@gmail.com>, "
25 "Mark Russell <mark.russell@zen.co.uk>")
26
27__all__ = ["BlockingIOError", "open", "IOBase", "RawIOBase", "FileIO",
28 "BytesIO", "StringIO", "BufferedIOBase",
29 "BufferedReader", "BufferedWriter", "BufferedRWPair",
30 "BufferedRandom", "TextIOBase", "TextIOWrapper"]
31
32import os
33import abc
34import sys
35import codecs
36import _fileio
37import warnings
38
39# open() uses st_blksize whenever we can
40DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
41
42# py3k has only new style classes
43__metaclass__ = type
44
45class BlockingIOError(IOError):
46
47 """Exception raised when I/O would block on a non-blocking I/O stream."""
48
49 def __init__(self, errno, strerror, characters_written=0):
50 IOError.__init__(self, errno, strerror)
51 self.characters_written = characters_written
52
53
54def open(file, mode="r", buffering=None, encoding=None, errors=None,
55 newline=None, closefd=True):
56 r"""Replacement for the built-in open function.
57
58 Args:
59 file: string giving the name of the file to be opened;
60 or integer file descriptor of the file to be wrapped (*).
61 mode: optional mode string; see below.
62 buffering: optional int >= 0 giving the buffer size; values
63 can be: 0 = unbuffered, 1 = line buffered,
64 larger = fully buffered.
65 encoding: optional string giving the text encoding.
66 errors: optional string giving the encoding error handling.
67 newline: optional newlines specifier; must be None, '', '\n', '\r'
68 or '\r\n'; all other values are illegal. It controls the
69 handling of line endings. It works as follows:
70
71 * On input, if `newline` is `None`, universal newlines
72 mode is enabled. Lines in the input can end in `'\n'`,
73 `'\r'`, or `'\r\n'`, and these are translated into
74 `'\n'` before being returned to the caller. If it is
75 `''`, universal newline mode is enabled, but line endings
76 are returned to the caller untranslated. If it has any of
77 the other legal values, input lines are only terminated by
78 the given string, and the line ending is returned to the
79 caller untranslated.
80
81 * On output, if `newline` is `None`, any `'\n'`
82 characters written are translated to the system default
83 line separator, `os.linesep`. If `newline` is `''`,
84 no translation takes place. If `newline` is any of the
85 other legal values, any `'\n'` characters written are
86 translated to the given string.
87
88 closefd: optional argument to keep the underlying file descriptor
89 open when the file is closed. It must not be false when
90 a filename is given.
91
92 (*) If a file descriptor is given, it is closed when the returned
93 I/O object is closed, unless closefd=False is given.
94
95 Mode strings characters:
96 'r': open for reading (default)
97 'w': open for writing, truncating the file first
98 'a': open for writing, appending to the end if the file exists
99 'b': binary mode
100 't': text mode (default)
101 '+': open a disk file for updating (implies reading and writing)
102 'U': universal newline mode (for backwards compatibility)
103
104 Constraints:
105 - encoding or errors must not be given when a binary mode is given
106 - buffering must not be zero when a text mode is given
107
108 Returns:
109 Depending on the mode and buffering arguments, either a raw
110 binary stream, a buffered binary stream, or a buffered text
111 stream, open for reading and/or writing.
112 """
113 if not isinstance(file, (str, unicode, int)):
114 raise TypeError("invalid file: %r" % file)
115 if not isinstance(mode, str):
116 raise TypeError("invalid mode: %r" % mode)
117 if buffering is not None and not isinstance(buffering, int):
118 raise TypeError("invalid buffering: %r" % buffering)
119 if encoding is not None and not isinstance(encoding, str):
120 raise TypeError("invalid encoding: %r" % encoding)
121 if errors is not None and not isinstance(errors, str):
122 raise TypeError("invalid errors: %r" % errors)
123 modes = set(mode)
124 if modes - set("arwb+tU") or len(mode) > len(modes):
125 raise ValueError("invalid mode: %r" % mode)
126 reading = "r" in modes
127 writing = "w" in modes
128 appending = "a" in modes
129 updating = "+" in modes
130 text = "t" in modes
131 binary = "b" in modes
132 if "U" in modes:
133 if writing or appending:
134 raise ValueError("can't use U and writing mode at once")
135 reading = True
136 if text and binary:
137 raise ValueError("can't have text and binary mode at once")
138 if reading + writing + appending > 1:
139 raise ValueError("can't have read/write/append mode at once")
140 if not (reading or writing or appending):
141 raise ValueError("must have exactly one of read/write/append mode")
142 if binary and encoding is not None:
143 raise ValueError("binary mode doesn't take an encoding argument")
144 if binary and errors is not None:
145 raise ValueError("binary mode doesn't take an errors argument")
146 if binary and newline is not None:
147 raise ValueError("binary mode doesn't take a newline argument")
148 raw = FileIO(file,
149 (reading and "r" or "") +
150 (writing and "w" or "") +
151 (appending and "a" or "") +
152 (updating and "+" or ""),
153 closefd)
154 if buffering is None:
155 buffering = -1
156 line_buffering = False
157 if buffering == 1 or buffering < 0 and raw.isatty():
158 buffering = -1
159 line_buffering = True
160 if buffering < 0:
161 buffering = DEFAULT_BUFFER_SIZE
162 try:
163 bs = os.fstat(raw.fileno()).st_blksize
164 except (os.error, AttributeError):
165 pass
166 else:
167 if bs > 1:
168 buffering = bs
169 if buffering < 0:
170 raise ValueError("invalid buffering size")
171 if buffering == 0:
172 if binary:
173 raw._name = file
174 raw._mode = mode
175 return raw
176 raise ValueError("can't have unbuffered text I/O")
177 if updating:
178 buffer = BufferedRandom(raw, buffering)
179 elif writing or appending:
180 buffer = BufferedWriter(raw, buffering)
181 elif reading:
182 buffer = BufferedReader(raw, buffering)
183 else:
184 raise ValueError("unknown mode: %r" % mode)
185 if binary:
186 buffer.name = file
187 buffer.mode = mode
188 return buffer
189 text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
190 text.name = file
191 text.mode = mode
192 return text
193
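# Illustrative usage sketch (added for this listing, not part of the module):
# how the mode, newline and buffering arguments of open() combine.  The file
# name "example.txt" is made up.
def _example_open():
    # newline="" disables translation of "\n" on output.
    f = open("example.txt", "w", encoding="utf-8", newline="")
    try:
        f.write(u"first\nsecond\n")
    finally:
        f.close()
    # A binary mode returns a buffered binary stream; buffering=0 would
    # return the raw FileIO object instead.
    f = open("example.txt", "rb")
    try:
        data = f.read()
    finally:
        f.close()
    assert data == b"first\nsecond\n"
    return data
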
194class _DocDescriptor:
195 """Helper for builtins.open.__doc__
196 """
197 def __get__(self, obj, typ):
198 return (
199 "open(file, mode='r', buffering=None, encoding=None, "
200 "errors=None, newline=None, closefd=True)\n\n" +
201 open.__doc__)
202
203class OpenWrapper:
204 """Wrapper for builtins.open
205
206 Trick so that open won't become a bound method when stored
207 as a class variable (as dumbdbm does).
208
209 See initstdio() in Python/pythonrun.c.
210 """
211 __doc__ = _DocDescriptor()
212
213 def __new__(cls, *args, **kwargs):
214 return open(*args, **kwargs)
215
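# Illustrative sketch (added for this listing, not part of the module): a
# plain function stored as a class attribute becomes a method on lookup,
# while OpenWrapper stays a plain callable.  The class name is made up.
def _example_open_wrapper():
    class _Container:
        _open = OpenWrapper   # the bare open() function here would turn
                              # into an unbound method on lookup
    return _Container._open("example.txt", "wb")
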
216
217class UnsupportedOperation(ValueError, IOError):
218 pass
219
220
221class IOBase(object):
222
223 """Base class for all I/O classes.
224
225 This class provides dummy implementations for many methods that
226 derived classes can override selectively; the default
227 implementations represent a file that cannot be read, written or
228 seeked.
229
230 This does not define read(), readinto() and write(), nor
231 readline() and friends, since their signatures vary per layer.
232
233 Note that calling any method (even inquiries) on a closed file is
234 undefined. Implementations may raise IOError in this case.
235 """
236
237 __metaclass__ = abc.ABCMeta
238
239 ### Internal ###
240
241 def _unsupported(self, name):
242 """Internal: raise an exception for unsupported operations."""
243 raise UnsupportedOperation("%s.%s() not supported" %
244 (self.__class__.__name__, name))
245
246 ### Positioning ###
247
248 def seek(self, pos, whence = 0):
249 """seek(pos: int, whence: int = 0) -> int. Change stream position.
250
251 Seek to byte offset pos relative to position indicated by whence:
252 0 Start of stream (the default). pos should be >= 0;
253 1 Current position - pos may be negative;
254 2 End of stream - pos usually negative.
255 Returns the new absolute position.
256 """
257 self._unsupported("seek")
258
259 def tell(self):
260 """tell() -> int. Return current stream position."""
261 return self.seek(0, 1)
262
263 def truncate(self, pos = None):
264 """truncate(pos: int = None) -> int. Truncate file to pos bytes.
265
266 pos defaults to the current IO position as reported by tell().
267 Returns the new size.
268 """
269 self._unsupported("truncate")
270
271 ### Flush and close ###
272
273 def flush(self):
274 """flush() -> None. Flushes write buffers, if applicable.
275
276 This is a no-op for read-only and non-blocking streams.
277 """
278 # XXX Should this return the number of bytes written???
279
280 __closed = False
281
282 def close(self):
283 """close() -> None. Flushes and closes the IO object.
284
285 This must be idempotent. It should also set a flag for the
286 'closed' property (see below) to test.
287 """
288 if not self.__closed:
289 try:
290 self.flush()
291 except IOError:
292 pass # If flush() fails, just give up
293 self.__closed = True
294
295 def __del__(self):
296 """Destructor. Calls close()."""
297 # The try/except block is in case this is called at program
298 # exit time, when it's possible that globals have already been
299 # deleted, and then the close() call might fail. Since
300 # there's nothing we can do about such failures and they annoy
301 # the end users, we suppress the traceback.
302 try:
303 self.close()
304 except:
305 pass
306
307 ### Inquiries ###
308
309 def seekable(self):
310 """seekable() -> bool. Return whether object supports random access.
311
312 If False, seek(), tell() and truncate() will raise IOError.
313 This method may need to do a test seek().
314 """
315 return False
316
317 def _checkSeekable(self, msg=None):
318 """Internal: raise an IOError if file is not seekable
319 """
320 if not self.seekable():
321 raise IOError("File or stream is not seekable."
322 if msg is None else msg)
323
324
325 def readable(self):
326 """readable() -> bool. Return whether object was opened for reading.
327
328 If False, read() will raise IOError.
329 """
330 return False
331
332 def _checkReadable(self, msg=None):
333 """Internal: raise an IOError if file is not readable
334 """
335 if not self.readable():
336 raise IOError("File or stream is not readable."
337 if msg is None else msg)
338
339 def writable(self):
340 """writable() -> bool. Return whether object was opened for writing.
341
342 If False, write() and truncate() will raise IOError.
343 """
344 return False
345
346 def _checkWritable(self, msg=None):
347 """Internal: raise an IOError if file is not writable
348 """
349 if not self.writable():
350 raise IOError("File or stream is not writable."
351 if msg is None else msg)
352
353 @property
354 def closed(self):
355 """closed: bool. True iff the file has been closed.
356
357 For backwards compatibility, this is a property, not a predicate.
358 """
359 return self.__closed
360
361 def _checkClosed(self, msg=None):
362 """Internal: raise a ValueError if file is closed
363 """
364 if self.closed:
365 raise ValueError("I/O operation on closed file."
366 if msg is None else msg)
367
368 ### Context manager ###
369
370 def __enter__(self):
371 """Context management protocol. Returns self."""
372 self._checkClosed()
373 return self
374
375 def __exit__(self, *args):
376 """Context management protocol. Calls close()"""
377 self.close()
378
379 ### Lower-level APIs ###
380
381 # XXX Should these be present even if unimplemented?
382
383 def fileno(self):
384 """fileno() -> int. Returns underlying file descriptor if one exists.
385
386 Raises IOError if the IO object does not use a file descriptor.
387 """
388 self._unsupported("fileno")
389
390 def isatty(self):
391 """isatty() -> bool. Returns whether this is an 'interactive' stream.
392
393 Returns False if we don't know.
394 """
395 self._checkClosed()
396 return False
397
398 ### Readline[s] and writelines ###
399
400 def readline(self, limit = -1):
401 """For backwards compatibility, a (slowish) readline()."""
402 if hasattr(self, "peek"):
403 def nreadahead():
404 readahead = self.peek(1)
405 if not readahead:
406 return 1
407 n = (readahead.find(b"\n") + 1) or len(readahead)
408 if limit >= 0:
409 n = min(n, limit)
410 return n
411 else:
412 def nreadahead():
413 return 1
414 if limit is None:
415 limit = -1
416 res = bytearray()
417 while limit < 0 or len(res) < limit:
418 b = self.read(nreadahead())
419 if not b:
420 break
421 res += b
422 if res.endswith(b"\n"):
423 break
424 return bytes(res)
425
426 def __iter__(self):
427 self._checkClosed()
428 return self
429
430 def next(self):
431 line = self.readline()
432 if not line:
433 raise StopIteration
434 return line
435
436 def readlines(self, hint=None):
437 if hint is None:
438 return list(self)
439 n = 0
440 lines = []
441 for line in self:
442 lines.append(line)
443 n += len(line)
444 if n >= hint:
445 break
446 return lines
447
448 def writelines(self, lines):
449 self._checkClosed()
450 for line in lines:
451 self.write(line)
452
453
454class RawIOBase(IOBase):
455
456 """Base class for raw binary I/O.
457
458 The read() method is implemented by calling readinto(); derived
459 classes that want to support read() only need to implement
460 readinto() as a primitive operation. In general, readinto()
461 can be more efficient than read().
462
463 (It would be tempting to also provide an implementation of
464 readinto() in terms of read(), in case the latter is a more
465 suitable primitive operation, but that would lead to nasty
466 recursion in case a subclass doesn't implement either.)
467 """
468
469 def read(self, n = -1):
470 """read(n: int) -> bytes. Read and return up to n bytes.
471
472 Returns an empty bytes array on EOF, or None if the object is
473 set not to block and has no data to read.
474 """
475 if n is None:
476 n = -1
477 if n < 0:
478 return self.readall()
479 b = bytearray(n.__index__())
480 n = self.readinto(b)
481 del b[n:]
482 return bytes(b)
483
484 def readall(self):
485 """readall() -> bytes. Read until EOF, using multiple read() calls."""
486 res = bytearray()
487 while True:
488 data = self.read(DEFAULT_BUFFER_SIZE)
489 if not data:
490 break
491 res += data
492 return bytes(res)
493
494 def readinto(self, b):
495 """readinto(b: bytes) -> int. Read up to len(b) bytes into b.
496
497 Returns number of bytes read (0 for EOF), or None if the object
498 is set not to block and has no data to read.
499 """
500 self._unsupported("readinto")
501
502 def write(self, b):
503 """write(b: bytes) -> int. Write the given buffer to the IO stream.
504
505 Returns the number of bytes written, which may be less than len(b).
506 """
507 self._unsupported("write")
508
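# Illustrative sketch (added for this listing, not part of the module): a
# minimal RawIOBase subclass that implements only readable() and readinto();
# read() and readall() then work through the default implementations above.
# All names in this example are made up.
def _example_raw_readinto():
    class _RawBytes(RawIOBase):
        def __init__(self, data):
            self._data = data
            self._pos = 0
        def readable(self):
            return True
        def readinto(self, b):
            chunk = self._data[self._pos:self._pos + len(b)]
            b[:len(chunk)] = chunk
            self._pos += len(chunk)
            return len(chunk)
    raw = _RawBytes(b"abcdef")
    assert raw.read(4) == b"abcd"    # satisfied via readinto()
    assert raw.readall() == b"ef"    # loops read() until EOF
    return raw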
509
510class FileIO(_fileio._FileIO, RawIOBase):
511
512 """Raw I/O implementation for OS files.
513
514 This multiply inherits from _FileIO and RawIOBase to make
515 isinstance(io.FileIO(), io.RawIOBase) return True without
516 requiring that _fileio._FileIO inherits from io.RawIOBase (which
517 would be hard to do since _fileio.c is written in C).
518 """
519
520 def close(self):
521 _fileio._FileIO.close(self)
522 RawIOBase.close(self)
523
524 @property
525 def name(self):
526 return self._name
527
528 @property
529 def mode(self):
530 return self._mode
531
532
533class BufferedIOBase(IOBase):
534
535 """Base class for buffered IO objects.
536
537 The main difference with RawIOBase is that the read() method
538 supports omitting the size argument, and does not have a default
539 implementation that defers to readinto().
540
541 In addition, read(), readinto() and write() may raise
542 BlockingIOError if the underlying raw stream is in non-blocking
543 mode and not ready; unlike their raw counterparts, they will never
544 return None.
545
546 A typical implementation should not inherit from a RawIOBase
547 implementation, but wrap one.
548 """
549
550 def read(self, n = None):
551 """read(n: int = None) -> bytes. Read and return up to n bytes.
552
553 If the argument is omitted, None, or negative, reads and
554 returns all data until EOF.
555
556 If the argument is positive, and the underlying raw stream is
557 not 'interactive', multiple raw reads may be issued to satisfy
558 the byte count (unless EOF is reached first). But for
559 interactive raw streams (XXX and for pipes?), at most one raw
560 read will be issued, and a short result does not imply that
561 EOF is imminent.
562
563 Returns an empty bytes array on EOF.
564
565 Raises BlockingIOError if the underlying raw stream has no
566 data at the moment.
567 """
568 self._unsupported("read")
569
570 def readinto(self, b):
571 """readinto(b: bytes) -> int. Read up to len(b) bytes into b.
572
573 Like read(), this may issue multiple reads to the underlying
574 raw stream, unless the latter is 'interactive' (XXX or a
575 pipe?).
576
577 Returns the number of bytes read (0 for EOF).
578
579 Raises BlockingIOError if the underlying raw stream has no
580 data at the moment.
581 """
582 # XXX This ought to work with anything that supports the buffer API
583 data = self.read(len(b))
584 n = len(data)
585 try:
586 b[:n] = data
587 except TypeError as err:
588 import array
589 if not isinstance(b, array.array):
590 raise err
591 b[:n] = array.array('b', data)
592 return n
593
594 def write(self, b):
595 """write(b: bytes) -> int. Write the given buffer to the IO stream.
596
597 Returns the number of bytes written, which is never less than
598 len(b).
599
600 Raises BlockingIOError if the buffer is full and the
601 underlying raw stream cannot accept more data at the moment.
602 """
603 self._unsupported("write")
604
605
606class _BufferedIOMixin(BufferedIOBase):
607
608 """A mixin implementation of BufferedIOBase with an underlying raw stream.
609
610 This passes most requests on to the underlying raw stream. It
611 does *not* provide implementations of read(), readinto() or
612 write().
613 """
614
615 def __init__(self, raw):
616 self.raw = raw
617
618 ### Positioning ###
619
620 def seek(self, pos, whence=0):
621 return self.raw.seek(pos, whence)
622
623 def tell(self):
624 return self.raw.tell()
625
626 def truncate(self, pos=None):
627 # Flush the stream. We're mixing buffered I/O with lower-level I/O,
628 # and a flush may be necessary to synch both views of the current
629 # file state.
630 self.flush()
631
632 if pos is None:
633 pos = self.tell()
634 return self.raw.truncate(pos)
635
636 ### Flush and close ###
637
638 def flush(self):
639 self.raw.flush()
640
641 def close(self):
642 if not self.closed:
643 try:
644 self.flush()
645 except IOError:
646 pass # If flush() fails, just give up
647 self.raw.close()
648
649 ### Inquiries ###
650
651 def seekable(self):
652 return self.raw.seekable()
653
654 def readable(self):
655 return self.raw.readable()
656
657 def writable(self):
658 return self.raw.writable()
659
660 @property
661 def closed(self):
662 return self.raw.closed
663
664 ### Lower-level APIs ###
665
666 def fileno(self):
667 return self.raw.fileno()
668
669 def isatty(self):
670 return self.raw.isatty()
671
672
673class BytesIO(BufferedIOBase):
674
675 """Buffered I/O implementation using an in-memory bytes buffer."""
676
677 # XXX More docs
678
679 def __init__(self, initial_bytes=None):
680 buf = bytearray()
681 if initial_bytes is not None:
682 buf += initial_bytes
683 self._buffer = buf
684 self._pos = 0
685
686 def getvalue(self):
687 return bytes(self._buffer)
688
689 def read(self, n=None):
690 if n is None:
691 n = -1
692 if n < 0:
693 n = len(self._buffer)
694 newpos = min(len(self._buffer), self._pos + n)
695 b = self._buffer[self._pos : newpos]
696 self._pos = newpos
697 return bytes(b)
698
699 def read1(self, n):
700 return self.read(n)
701
702 def write(self, b):
703 if self.closed:
704 raise ValueError("write to closed file")
705 if isinstance(b, unicode):
706 raise TypeError("can't write unicode to binary stream")
707 n = len(b)
708 newpos = self._pos + n
709 if newpos > len(self._buffer):
710 # Inserts null bytes between the current end of the file
711 # and the new write position.
712 padding = b'\x00' * (newpos - len(self._buffer) - n)
713 self._buffer[self._pos:newpos - n] = padding
714 self._buffer[self._pos:newpos] = b
715 self._pos = newpos
716 return n
717
718 def seek(self, pos, whence=0):
719 try:
720 pos = pos.__index__()
721 except AttributeError as err:
722 raise TypeError("an integer is required") # from err
723 if whence == 0:
724 self._pos = max(0, pos)
725 elif whence == 1:
726 self._pos = max(0, self._pos + pos)
727 elif whence == 2:
728 self._pos = max(0, len(self._buffer) + pos)
729 else:
730 raise IOError("invalid whence value")
731 return self._pos
732
733 def tell(self):
734 return self._pos
735
736 def truncate(self, pos=None):
737 if pos is None:
738 pos = self._pos
739 del self._buffer[pos:]
740 return pos
741
742 def readable(self):
743 return True
744
745 def writable(self):
746 return True
747
748 def seekable(self):
749 return True
750
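# Illustrative sketch (added for this listing, not part of the module): basic
# BytesIO behaviour, including the null padding written when the position has
# been moved past the end of the buffer.
def _example_bytesio():
    b = BytesIO(b"abc")
    assert b.read(2) == b"ab"
    b.seek(0, 2)                     # seek to the end of the buffer
    b.write(b"def")
    assert b.getvalue() == b"abcdef"
    b.seek(8)                        # seek past the end
    b.write(b"x")
    assert b.getvalue() == b"abcdef\x00\x00x"
    return b.getvalue()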
751
752class BufferedReader(_BufferedIOMixin):
753
754 """Buffer for a readable sequential RawIO object."""
755
756 def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
757 """Create a new buffered reader using the given readable raw IO object.
758 """
759 raw._checkReadable()
760 _BufferedIOMixin.__init__(self, raw)
761 self._read_buf = b""
762 self.buffer_size = buffer_size
763
764 def read(self, n=None):
765 """Read n bytes.
766
767 Returns exactly n bytes of data unless the underlying raw IO
768 stream reaches EOF or if the call would block in non-blocking
769 mode. If n is negative, read until EOF or until read() would
770 block.
771 """
772 if n is None:
773 n = -1
774 nodata_val = b""
775 while n < 0 or len(self._read_buf) < n:
776 to_read = max(self.buffer_size,
777 n if n is not None else 2*len(self._read_buf))
778 current = self.raw.read(to_read)
779 if current in (b"", None):
780 nodata_val = current
781 break
782 self._read_buf += current
783 if self._read_buf:
784 if n < 0:
785 n = len(self._read_buf)
786 out = self._read_buf[:n]
787 self._read_buf = self._read_buf[n:]
788 else:
789 out = nodata_val
790 return out
791
792 def peek(self, n=0):
793 """Returns buffered bytes without advancing the position.
794
795 The argument indicates a desired minimal number of bytes; we
796 do at most one raw read to satisfy it. We never return more
797 than self.buffer_size.
798 """
799 want = min(n, self.buffer_size)
800 have = len(self._read_buf)
801 if have < want:
802 to_read = self.buffer_size - have
803 current = self.raw.read(to_read)
804 if current:
805 self._read_buf += current
806 return self._read_buf
807
808 def read1(self, n):
809 """Reads up to n bytes, with at most one read() system call.
810
811 Returns up to n bytes. If at least one byte is buffered, we
812 only return buffered bytes. Otherwise, we do one raw read.
813 """
814 if n <= 0:
815 return b""
816 self.peek(1)
817 return self.read(min(n, len(self._read_buf)))
818
819 def tell(self):
820 return self.raw.tell() - len(self._read_buf)
821
822 def seek(self, pos, whence=0):
823 if whence == 1:
824 pos -= len(self._read_buf)
825 pos = self.raw.seek(pos, whence)
826 self._read_buf = b""
827 return pos
828
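# Illustrative sketch (added for this listing, not part of the module): how
# peek() and read1() behave on a BufferedReader over a FileIO.  The file name
# is made up and the exact byte counts depend on the single raw read.
def _example_buffered_reader():
    raw = FileIO("example.bin", "r")
    buf = BufferedReader(raw, buffer_size=16)
    head = buf.peek(1)       # fills the buffer but does not advance
    first = buf.read1(4)     # at most one raw read, returns <= 4 bytes
    rest = buf.read()        # read the remainder up to EOF
    buf.close()
    return head, first, rest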
829
830class BufferedWriter(_BufferedIOMixin):
831
832 # XXX docstring
833
834 def __init__(self, raw,
835 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
836 raw._checkWritable()
837 _BufferedIOMixin.__init__(self, raw)
838 self.buffer_size = buffer_size
839 self.max_buffer_size = (2*buffer_size
840 if max_buffer_size is None
841 else max_buffer_size)
842 self._write_buf = bytearray()
843
844 def write(self, b):
845 if self.closed:
846 raise ValueError("write to closed file")
847 if isinstance(b, unicode):
848 raise TypeError("can't write unicode to binary stream")
849 # XXX we can implement some more tricks to try and avoid partial writes
850 if len(self._write_buf) > self.buffer_size:
851 # We're full, so let's pre-flush the buffer
852 try:
853 self.flush()
854 except BlockingIOError as e:
855 # We can't accept anything else.
856 # XXX Why not just let the exception pass through?
857 raise BlockingIOError(e.errno, e.strerror, 0)
858 before = len(self._write_buf)
859 self._write_buf.extend(b)
860 written = len(self._write_buf) - before
861 if len(self._write_buf) > self.buffer_size:
862 try:
863 self.flush()
864 except BlockingIOError as e:
865 if (len(self._write_buf) > self.max_buffer_size):
866 # We've hit max_buffer_size. We have to accept a partial
867 # write and cut back our buffer.
868 overage = len(self._write_buf) - self.max_buffer_size
869 self._write_buf = self._write_buf[:self.max_buffer_size]
870 raise BlockingIOError(e.errno, e.strerror, overage)
871 return written
872
873 def flush(self):
874 if self.closed:
875 raise ValueError("flush of closed file")
876 written = 0
877 try:
878 while self._write_buf:
879 n = self.raw.write(self._write_buf)
880 del self._write_buf[:n]
881 written += n
882 except BlockingIOError as e:
883 n = e.characters_written
884 del self._write_buf[:n]
885 written += n
886 raise BlockingIOError(e.errno, e.strerror, written)
887
888 def tell(self):
889 return self.raw.tell() + len(self._write_buf)
890
891 def seek(self, pos, whence=0):
892 self.flush()
893 return self.raw.seek(pos, whence)
894
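# Illustrative sketch (added for this listing, not part of the module): how a
# caller can use BlockingIOError.characters_written after flushing a
# BufferedWriter whose raw stream is in non-blocking mode.  "buf" is assumed
# to already wrap such a stream; it is not constructed here.
def _example_nonblocking_flush(buf):
    try:
        buf.flush()
    except BlockingIOError as e:
        # e.characters_written bytes were pushed to the raw stream before it
        # stopped accepting data; the rest stays in the write buffer.
        return e.characters_written
    return 0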
895
896class BufferedRWPair(BufferedIOBase):
897
898 """A buffered reader and writer object together.
899
900 A buffered reader object and buffered writer object put together
901 to form a sequential IO object that can read and write.
902
903 This is typically used with a socket or two-way pipe.
904
905 XXX The usefulness of this (compared to having two separate IO
906 objects) is questionable.
907 """
908
909 def __init__(self, reader, writer,
910 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
911 """Constructor.
912
913 The arguments are two RawIO instances.
914 """
915 reader._checkReadable()
916 writer._checkWritable()
917 self.reader = BufferedReader(reader, buffer_size)
918 self.writer = BufferedWriter(writer, buffer_size, max_buffer_size)
919
920 def read(self, n=None):
921 if n is None:
922 n = -1
923 return self.reader.read(n)
924
925 def readinto(self, b):
926 return self.reader.readinto(b)
927
928 def write(self, b):
929 return self.writer.write(b)
930
931 def peek(self, n=0):
932 return self.reader.peek(n)
933
934 def read1(self, n):
935 return self.reader.read1(n)
936
937 def readable(self):
938 return self.reader.readable()
939
940 def writable(self):
941 return self.writer.writable()
942
943 def flush(self):
944 return self.writer.flush()
945
946 def close(self):
947 self.writer.close()
948 self.reader.close()
949
950 def isatty(self):
951 return self.reader.isatty() or self.writer.isatty()
952
953 @property
954 def closed(self):
955 return self.writer.closed
956
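# Illustrative sketch (added for this listing, not part of the module): a
# BufferedRWPair over the two ends of an os.pipe(), each wrapped in a FileIO.
def _example_rw_pair():
    rfd, wfd = os.pipe()
    pair = BufferedRWPair(FileIO(rfd, "r"), FileIO(wfd, "w"))
    pair.write(b"ping")
    pair.flush()
    data = pair.read(4)
    pair.close()
    return data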
957
958class BufferedRandom(BufferedWriter, BufferedReader):
959
960 # XXX docstring
961
962 def __init__(self, raw,
963 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
964 raw._checkSeekable()
965 BufferedReader.__init__(self, raw, buffer_size)
966 BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)
967
968 def seek(self, pos, whence=0):
969 self.flush()
970 # First do the raw seek, then empty the read buffer, so that
971 # if the raw seek fails, we don't lose buffered data forever.
972 pos = self.raw.seek(pos, whence)
973 self._read_buf = b""
974 return pos
975
976 def tell(self):
977 if (self._write_buf):
978 return self.raw.tell() + len(self._write_buf)
979 else:
980 return self.raw.tell() - len(self._read_buf)
981
982 def read(self, n=None):
983 if n is None:
984 n = -1
985 self.flush()
986 return BufferedReader.read(self, n)
987
988 def readinto(self, b):
989 self.flush()
990 return BufferedReader.readinto(self, b)
991
992 def peek(self, n=0):
993 self.flush()
994 return BufferedReader.peek(self, n)
995
996 def read1(self, n):
997 self.flush()
998 return BufferedReader.read1(self, n)
999
1000 def write(self, b):
1001 if self._read_buf:
1002 self.raw.seek(-len(self._read_buf), 1) # Undo readahead
1003 self._read_buf = b""
1004 return BufferedWriter.write(self, b)
1005
1006
1007class TextIOBase(IOBase):
1008
1009 """Base class for text I/O.
1010
1011 This class provides a character and line based interface to stream I/O.
1012
1013 There is no readinto() method, as character strings are immutable.
1014 """
1015
1016 def read(self, n = -1):
1017 """read(n: int = -1) -> unicode. Read at most n characters from stream.
1018
1019 Read from underlying buffer until we have n characters or we hit EOF.
1020 If n is negative or omitted, read until EOF.
1021 """
1022 self._unsupported("read")
1023
1024 def write(self, s):
1025 """write(s: unicode) -> int. Write string s to stream."""
1026 self._unsupported("write")
1027
1028 def truncate(self, pos = None):
1029 """truncate(pos: int = None) -> int. Truncate size to pos."""
1030 self.flush()
1031 if pos is None:
1032 pos = self.tell()
1033 self.seek(pos)
1034 return self.buffer.truncate()
1035
1036 def readline(self):
1037 """readline() -> unicode. Read until newline or EOF.
1038
1039 Returns an empty string if EOF is hit immediately.
1040 """
1041 self._unsupported("readline")
1042
1043 @property
1044 def encoding(self):
1045 """Subclasses should override."""
1046 return None
1047
1048 @property
1049 def newlines(self):
1050 """newlines -> None | unicode | tuple of unicode. Line endings translated
1051 so far.
1052
1053 Only line endings translated during reading are considered.
1054
1055 Subclasses should override.
1056 """
1057 return None
1058
1059
1060class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
1061 """Codec used when reading a file in universal newlines mode.
1062 It wraps another incremental decoder, translating \\r\\n and \\r into \\n.
1063 It also records the types of newlines encountered.
1064 When used with translate=False, it ensures that the newline sequence is
1065 returned in one piece.
1066 """
1067 def __init__(self, decoder, translate, errors='strict'):
1068 codecs.IncrementalDecoder.__init__(self, errors=errors)
1069 self.buffer = b''
1070 self.translate = translate
1071 self.decoder = decoder
1072 self.seennl = 0
1073
1074 def decode(self, input, final=False):
1075 # decode input (with the eventual \r from a previous pass)
1076 if self.buffer:
1077 input = self.buffer + input
1078
1079 output = self.decoder.decode(input, final=final)
1080
1081 # retain last \r even when not translating data:
1082 # then readline() is sure to get \r\n in one pass
1083 if output.endswith("\r") and not final:
1084 output = output[:-1]
1085 self.buffer = b'\r'
1086 else:
1087 self.buffer = b''
1088
1089 # Record which newlines are read
1090 crlf = output.count('\r\n')
1091 cr = output.count('\r') - crlf
1092 lf = output.count('\n') - crlf
1093 self.seennl |= (lf and self._LF) | (cr and self._CR) \
1094 | (crlf and self._CRLF)
1095
1096 if self.translate:
1097 if crlf:
1098 output = output.replace("\r\n", "\n")
1099 if cr:
1100 output = output.replace("\r", "\n")
1101
1102 return output
1103
1104 def getstate(self):
1105 buf, flag = self.decoder.getstate()
1106 return buf + self.buffer, flag
1107
1108 def setstate(self, state):
1109 buf, flag = state
1110 if buf.endswith(b'\r'):
1111 self.buffer = b'\r'
1112 buf = buf[:-1]
1113 else:
1114 self.buffer = b''
1115 self.decoder.setstate((buf, flag))
1116
1117 def reset(self):
1118 self.seennl = 0
1119 self.buffer = b''
1120 self.decoder.reset()
1121
1122 _LF = 1
1123 _CR = 2
1124 _CRLF = 4
1125
1126 @property
1127 def newlines(self):
1128 return (None,
1129 "\n",
1130 "\r",
1131 ("\r", "\n"),
1132 "\r\n",
1133 ("\n", "\r\n"),
1134 ("\r", "\r\n"),
1135 ("\r", "\n", "\r\n")
1136 )[self.seennl]
1137
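# Illustrative sketch (added for this listing, not part of the module): the
# decoder holds back a trailing "\r" so a "\r\n" split across two chunks is
# still returned in one piece, and newlines records what has been seen.
def _example_newline_decoder():
    decoder = IncrementalNewlineDecoder(
        codecs.getincrementaldecoder("utf-8")(), translate=True)
    out = decoder.decode(b"one\r")            # the trailing "\r" is retained
    out += decoder.decode(b"\ntwo\r")         # completes the "\r\n"
    out += decoder.decode(b"", final=True)    # flushes the held-back "\r"
    assert out == u"one\ntwo\n"
    assert decoder.newlines == ("\r", "\r\n")
    return out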
1138
1139class TextIOWrapper(TextIOBase):
1140
1141 """Buffered text stream.
1142
1143 Character and line based layer over a BufferedIOBase object.
1144 """
1145
1146 _CHUNK_SIZE = 128
1147
1148 def __init__(self, buffer, encoding=None, errors=None, newline=None,
1149 line_buffering=False):
1150 if newline not in (None, "", "\n", "\r", "\r\n"):
1151 raise ValueError("illegal newline value: %r" % (newline,))
1152 if encoding is None:
1153 try:
1154 encoding = os.device_encoding(buffer.fileno())
1155 except (AttributeError, UnsupportedOperation):
1156 pass
1157 if encoding is None:
1158 try:
1159 import locale
1160 except ImportError:
1161 # Importing locale may fail if Python is being built
1162 encoding = "ascii"
1163 else:
1164 encoding = locale.getpreferredencoding()
1165
1166 if not isinstance(encoding, str):
1167 raise ValueError("invalid encoding: %r" % encoding)
1168
1169 if errors is None:
1170 errors = "strict"
1171 else:
1172 if not isinstance(errors, str):
1173 raise ValueError("invalid errors: %r" % errors)
1174
1175 self.buffer = buffer
1176 self._line_buffering = line_buffering
1177 self._encoding = encoding
1178 self._errors = errors
1179 self._readuniversal = not newline
1180 self._readtranslate = newline is None
1181 self._readnl = newline
1182 self._writetranslate = newline != ''
1183 self._writenl = newline or os.linesep
1184 self._encoder = None
1185 self._decoder = None
1186 self._decoded_chars = '' # buffer for text returned from decoder
1187 self._decoded_chars_used = 0 # offset into _decoded_chars for read()
1188 self._snapshot = None # info for reconstructing decoder state
1189 self._seekable = self._telling = self.buffer.seekable()
1190
1191 # self._snapshot is either None, or a tuple (dec_flags, next_input)
1192 # where dec_flags is the second (integer) item of the decoder state
1193 # and next_input is the chunk of input bytes that comes next after the
1194 # snapshot point. We use this to reconstruct decoder states in tell().
1195
1196 # Naming convention:
1197 # - "bytes_..." for integer variables that count input bytes
1198 # - "chars_..." for integer variables that count decoded characters
1199
1200 def __repr__(self):
1201 return '<TIOW %x>' % id(self)
1202
1203 @property
1204 def encoding(self):
1205 return self._encoding
1206
1207 @property
1208 def errors(self):
1209 return self._errors
1210
1211 @property
1212 def line_buffering(self):
1213 return self._line_buffering
1214
1215 def seekable(self):
1216 return self._seekable
1217
1218 def flush(self):
1219 self.buffer.flush()
1220 self._telling = self._seekable
1221
1222 def close(self):
1223 try:
1224 self.flush()
1225 except:
1226 pass # If flush() fails, just give up
1227 self.buffer.close()
1228
1229 @property
1230 def closed(self):
1231 return self.buffer.closed
1232
1233 def fileno(self):
1234 return self.buffer.fileno()
1235
1236 def isatty(self):
1237 return self.buffer.isatty()
1238
1239 def write(self, s):
1240 if self.closed:
1241 raise ValueError("write to closed file")
1242 if not isinstance(s, unicode):
1243 raise TypeError("can't write %s to text stream" %
1244 s.__class__.__name__)
1245 length = len(s)
1246 haslf = (self._writetranslate or self._line_buffering) and "\n" in s
1247 if haslf and self._writetranslate and self._writenl != "\n":
1248 s = s.replace("\n", self._writenl)
1249 encoder = self._encoder or self._get_encoder()
1250 # XXX What if we were just reading?
1251 b = encoder.encode(s)
1252 self.buffer.write(b)
1253 if self._line_buffering and (haslf or "\r" in s):
1254 self.flush()
1255 self._snapshot = None
1256 if self._decoder:
1257 self._decoder.reset()
1258 return length
1259
1260 def _get_encoder(self):
1261 make_encoder = codecs.getincrementalencoder(self._encoding)
1262 self._encoder = make_encoder(self._errors)
1263 return self._encoder
1264
1265 def _get_decoder(self):
1266 make_decoder = codecs.getincrementaldecoder(self._encoding)
1267 decoder = make_decoder(self._errors)
1268 if self._readuniversal:
1269 decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
1270 self._decoder = decoder
1271 return decoder
1272
1273 # The following three methods implement an ADT for _decoded_chars.
1274 # Text returned from the decoder is buffered here until the client
1275 # requests it by calling our read() or readline() method.
1276 def _set_decoded_chars(self, chars):
1277 """Set the _decoded_chars buffer."""
1278 self._decoded_chars = chars
1279 self._decoded_chars_used = 0
1280
1281 def _get_decoded_chars(self, n=None):
1282 """Advance into the _decoded_chars buffer."""
1283 offset = self._decoded_chars_used
1284 if n is None:
1285 chars = self._decoded_chars[offset:]
1286 else:
1287 chars = self._decoded_chars[offset:offset + n]
1288 self._decoded_chars_used += len(chars)
1289 return chars
1290
1291 def _rewind_decoded_chars(self, n):
1292 """Rewind the _decoded_chars buffer."""
1293 if self._decoded_chars_used < n:
1294 raise AssertionError("rewind decoded_chars out of bounds")
1295 self._decoded_chars_used -= n
1296
1297 def _read_chunk(self):
1298 """
1299 Read and decode the next chunk of data from the BufferedReader.
1300
1301 The return value is True unless EOF was reached. The decoded string
1302 is placed in self._decoded_chars (replacing its previous value).
1303 The entire input chunk is sent to the decoder, though some of it
1304 may remain buffered in the decoder, yet to be converted.
1305 """
1306
1307 if self._decoder is None:
1308 raise ValueError("no decoder")
1309
1310 if self._telling:
1311 # To prepare for tell(), we need to snapshot a point in the
1312 # file where the decoder's input buffer is empty.
1313
1314 dec_buffer, dec_flags = self._decoder.getstate()
1315 # Given this, we know there was a valid snapshot point
1316 # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).
1317
1318 # Read a chunk, decode it, and put the result in self._decoded_chars.
1319 input_chunk = self.buffer.read1(self._CHUNK_SIZE)
1320 eof = not input_chunk
1321 self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
1322
1323 if self._telling:
1324 # At the snapshot point, len(dec_buffer) bytes before the read,
1325 # the next input to be decoded is dec_buffer + input_chunk.
1326 self._snapshot = (dec_flags, dec_buffer + input_chunk)
1327
1328 return not eof
1329
1330 def _pack_cookie(self, position, dec_flags=0,
1331 bytes_to_feed=0, need_eof=0, chars_to_skip=0):
1332 # The meaning of a tell() cookie is: seek to position, set the
1333 # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
1334 # into the decoder with need_eof as the EOF flag, then skip
1335 # chars_to_skip characters of the decoded result. For most simple
1336 # decoders, tell() will often just give a byte offset in the file.
1337 return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
1338 (chars_to_skip<<192) | bool(need_eof)<<256)
1339
1340 def _unpack_cookie(self, bigint):
1341 rest, position = divmod(bigint, 1<<64)
1342 rest, dec_flags = divmod(rest, 1<<64)
1343 rest, bytes_to_feed = divmod(rest, 1<<64)
1344 need_eof, chars_to_skip = divmod(rest, 1<<64)
1345 return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
1346
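    # Worked example (illustrative, not part of the original module): the
    # cookie for position=10, dec_flags=1, bytes_to_feed=2, need_eof=0,
    # chars_to_skip=3 is the integer
    #     10 | (1 << 64) | (2 << 128) | (3 << 192)
    # and _unpack_cookie() recovers (10, 1, 2, 0, 3) from it by repeated
    # divmod(..., 1 << 64).
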
1347 def tell(self):
1348 if not self._seekable:
1349 raise IOError("underlying stream is not seekable")
1350 if not self._telling:
1351 raise IOError("telling position disabled by next() call")
1352 self.flush()
1353 position = self.buffer.tell()
1354 decoder = self._decoder
1355 if decoder is None or self._snapshot is None:
1356 if self._decoded_chars:
1357 # This should never happen.
1358 raise AssertionError("pending decoded text")
1359 return position
1360
1361 # Skip backward to the snapshot point (see _read_chunk).
1362 dec_flags, next_input = self._snapshot
1363 position -= len(next_input)
1364
1365 # How many decoded characters have been used up since the snapshot?
1366 chars_to_skip = self._decoded_chars_used
1367 if chars_to_skip == 0:
1368 # We haven't moved from the snapshot point.
1369 return self._pack_cookie(position, dec_flags)
1370
1371 # Starting from the snapshot position, we will walk the decoder
1372 # forward until it gives us enough decoded characters.
1373 saved_state = decoder.getstate()
1374 try:
1375 # Note our initial start point.
1376 decoder.setstate((b'', dec_flags))
1377 start_pos = position
1378 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1379 need_eof = 0
1380
1381 # Feed the decoder one byte at a time. As we go, note the
1382 # nearest "safe start point" before the current location
1383 # (a point where the decoder has nothing buffered, so seek()
1384 # can safely start from there and advance to this location).
1385 next_byte = bytearray(1)
1386 for next_byte[0] in next_input:
1387 bytes_fed += 1
1388 chars_decoded += len(decoder.decode(next_byte))
1389 dec_buffer, dec_flags = decoder.getstate()
1390 if not dec_buffer and chars_decoded <= chars_to_skip:
1391 # Decoder buffer is empty, so this is a safe start point.
1392 start_pos += bytes_fed
1393 chars_to_skip -= chars_decoded
1394 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1395 if chars_decoded >= chars_to_skip:
1396 break
1397 else:
1398 # We didn't get enough decoded data; signal EOF to get more.
1399 chars_decoded += len(decoder.decode(b'', final=True))
1400 need_eof = 1
1401 if chars_decoded < chars_to_skip:
1402 raise IOError("can't reconstruct logical file position")
1403
1404 # The returned cookie corresponds to the last safe start point.
1405 return self._pack_cookie(
1406 start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
1407 finally:
1408 decoder.setstate(saved_state)
1409
1410 def seek(self, cookie, whence=0):
1411 if not self._seekable:
1412 raise IOError("underlying stream is not seekable")
1413 if whence == 1: # seek relative to current position
1414 if cookie != 0:
1415 raise IOError("can't do nonzero cur-relative seeks")
1416 # Seeking to the current position should attempt to
1417 # sync the underlying buffer with the current position.
1418 whence = 0
1419 cookie = self.tell()
1420 if whence == 2: # seek relative to end of file
1421 if cookie != 0:
1422 raise IOError("can't do nonzero end-relative seeks")
1423 self.flush()
1424 position = self.buffer.seek(0, 2)
1425 self._set_decoded_chars('')
1426 self._snapshot = None
1427 if self._decoder:
1428 self._decoder.reset()
1429 return position
1430 if whence != 0:
1431 raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
1432 (whence,))
1433 if cookie < 0:
1434 raise ValueError("negative seek position %r" % (cookie,))
1435 self.flush()
1436
1437 # The strategy of seek() is to go back to the safe start point
1438 # and replay the effect of read(chars_to_skip) from there.
1439 start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
1440 self._unpack_cookie(cookie)
1441
1442 # Seek back to the safe start point.
1443 self.buffer.seek(start_pos)
1444 self._set_decoded_chars('')
1445 self._snapshot = None
1446
1447 # Restore the decoder to its state from the safe start point.
1448 if self._decoder or dec_flags or chars_to_skip:
1449 self._decoder = self._decoder or self._get_decoder()
1450 self._decoder.setstate((b'', dec_flags))
1451 self._snapshot = (dec_flags, b'')
1452
1453 if chars_to_skip:
1454 # Just like _read_chunk, feed the decoder and save a snapshot.
1455 input_chunk = self.buffer.read(bytes_to_feed)
1456 self._set_decoded_chars(
1457 self._decoder.decode(input_chunk, need_eof))
1458 self._snapshot = (dec_flags, input_chunk)
1459
1460 # Skip chars_to_skip of the decoded characters.
1461 if len(self._decoded_chars) < chars_to_skip:
1462 raise IOError("can't restore logical file position")
1463 self._decoded_chars_used = chars_to_skip
1464
1465 return cookie
1466
1467 def read(self, n=None):
1468 if n is None:
1469 n = -1
1470 decoder = self._decoder or self._get_decoder()
1471 if n < 0:
1472 # Read everything.
1473 result = (self._get_decoded_chars() +
1474 decoder.decode(self.buffer.read(), final=True))
1475 self._set_decoded_chars('')
1476 self._snapshot = None
1477 return result
1478 else:
1479 # Keep reading chunks until we have n characters to return.
1480 eof = False
1481 result = self._get_decoded_chars(n)
1482 while len(result) < n and not eof:
1483 eof = not self._read_chunk()
1484 result += self._get_decoded_chars(n - len(result))
1485 return result
1486
1487 def next(self):
1488 self._telling = False
1489 line = self.readline()
1490 if not line:
1491 self._snapshot = None
1492 self._telling = self._seekable
1493 raise StopIteration
1494 return line
1495
1496 def readline(self, limit=None):
1497 if limit is None:
1498 limit = -1
1499
1500 # Grab all the decoded text (we will rewind any extra bits later).
1501 line = self._get_decoded_chars()
1502
1503 start = 0
1504 decoder = self._decoder or self._get_decoder()
1505
1506 pos = endpos = None
1507 while True:
1508 if self._readtranslate:
1509 # Newlines are already translated, only search for \n
1510 pos = line.find('\n', start)
1511 if pos >= 0:
1512 endpos = pos + 1
1513 break
1514 else:
1515 start = len(line)
1516
1517 elif self._readuniversal:
1518 # Universal newline search. Find any of \r, \r\n, \n
1519 # The decoder ensures that \r\n are not split in two pieces
1520
1521 # In C we'd look for these in parallel of course.
1522 nlpos = line.find("\n", start)
1523 crpos = line.find("\r", start)
1524 if crpos == -1:
1525 if nlpos == -1:
1526 # Nothing found
1527 start = len(line)
1528 else:
1529 # Found \n
1530 endpos = nlpos + 1
1531 break
1532 elif nlpos == -1:
1533 # Found lone \r
1534 endpos = crpos + 1
1535 break
1536 elif nlpos < crpos:
1537 # Found \n
1538 endpos = nlpos + 1
1539 break
1540 elif nlpos == crpos + 1:
1541 # Found \r\n
1542 endpos = crpos + 2
1543 break
1544 else:
1545 # Found \r
1546 endpos = crpos + 1
1547 break
1548 else:
1549 # non-universal
1550 pos = line.find(self._readnl)
1551 if pos >= 0:
1552 endpos = pos + len(self._readnl)
1553 break
1554
1555 if limit >= 0 and len(line) >= limit:
1556 endpos = limit # reached length limit
1557 break
1558
1559 # No line ending seen yet - get more data
1560 more_line = ''
1561 while self._read_chunk():
1562 if self._decoded_chars:
1563 break
1564 if self._decoded_chars:
1565 line += self._get_decoded_chars()
1566 else:
1567 # end of file
1568 self._set_decoded_chars('')
1569 self._snapshot = None
1570 return line
1571
1572 if limit >= 0 and endpos > limit:
1573 endpos = limit # don't exceed limit
1574
1575 # Rewind _decoded_chars to just after the line ending we found.
1576 self._rewind_decoded_chars(len(line) - endpos)
1577 return line[:endpos]
1578
1579 @property
1580 def newlines(self):
1581 return self._decoder.newlines if self._decoder else None
1582
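# Illustrative sketch (added for this listing, not part of the module):
# newline translation on output.  With newline="\r\n", every "\n" written is
# translated before being encoded into the underlying binary buffer.
def _example_text_wrapper():
    t = TextIOWrapper(BytesIO(), encoding="utf-8", newline="\r\n")
    t.write(u"a\nb\n")
    t.flush()
    assert t.buffer.getvalue() == b"a\r\nb\r\n"
    return t.buffer.getvalue()
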
1583class StringIO(TextIOWrapper):
1584
1585 # XXX This is really slow, but fully functional
1586
1587 def __init__(self, initial_value="", encoding="utf-8",
1588 errors="strict", newline="\n"):
1589 super(StringIO, self).__init__(BytesIO(),
1590 encoding=encoding,
1591 errors=errors,
1592 newline=newline)
1593 if initial_value:
1594 if not isinstance(initial_value, unicode):
1595 initial_value = unicode(initial_value)
1596 self.write(initial_value)
1597 self.seek(0)
1598
1599 def getvalue(self):
1600 self.flush()
1601 return self.buffer.getvalue().decode(self._encoding, self._errors)
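
# Illustrative sketch (added for this listing, not part of the module):
# StringIO keeps its text in an underlying BytesIO, so getvalue() decodes the
# buffered bytes back to a unicode string.
def _example_stringio():
    s = StringIO(u"hello\n")
    assert s.readline() == u"hello\n"
    s.write(u"world\n")
    assert s.getvalue() == u"hello\nworld\n"
    return s.getvalue()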