"""
Python implementation of the io module.
"""

import os
import abc
import codecs
import warnings
# Import _thread instead of threading to reduce startup cost
try:
    from _thread import allocate_lock as Lock
except ImportError:
    from _dummy_thread import allocate_lock as Lock

import io
from io import __all__
from io import SEEK_SET, SEEK_CUR, SEEK_END

# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024  # bytes

# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.


class BlockingIOError(IOError):

    """Exception raised when I/O would block on a non-blocking I/O stream."""

    def __init__(self, errno, strerror, characters_written=0):
        super().__init__(errno, strerror)
        if not isinstance(characters_written, int):
            raise TypeError("characters_written must be an integer")
        self.characters_written = characters_written


def open(file: (str, bytes), mode: str = "r", buffering: int = None,
         encoding: str = None, errors: str = None,
         newline: str = None, closefd: bool = True) -> "IOBase":

    r"""Open file and return a stream. Raise IOError upon failure.

    file is either a text or byte string giving the name (and the path
    if the file isn't in the current working directory) of the file to
    be opened or an integer file descriptor of the file to be
    wrapped. (If a file descriptor is given, it is closed when the
    returned I/O object is closed, unless closefd is set to False.)

    mode is an optional string that specifies the mode in which the file
    is opened. It defaults to 'r' which means open for reading in text
    mode. Other common values are 'w' for writing (truncating the file if
    it already exists), and 'a' for appending (which on some Unix systems,
    means that all writes append to the end of the file regardless of the
    current seek position). In text mode, if encoding is not specified the
    encoding used is platform dependent. (For reading and writing raw
    bytes use binary mode and leave encoding unspecified.) The available
    modes are:

    ========= ===============================================================
    Character Meaning
    --------- ---------------------------------------------------------------
    'r'       open for reading (default)
    'w'       open for writing, truncating the file first
    'a'       open for writing, appending to the end of the file if it exists
    'b'       binary mode
    't'       text mode (default)
    '+'       open a disk file for updating (reading and writing)
    'U'       universal newline mode (for backwards compatibility; unneeded
              for new code)
    ========= ===============================================================

    The default mode is 'rt' (open for reading text). For binary random
    access, the mode 'w+b' opens and truncates the file to 0 bytes, while
    'r+b' opens the file without truncation.

    Python distinguishes between files opened in binary and text modes,
    even when the underlying operating system doesn't. Files opened in
    binary mode (appending 'b' to the mode argument) return contents as
    bytes objects without any decoding. In text mode (the default, or when
    't' is appended to the mode argument), the contents of the file are
    returned as strings, the bytes having been first decoded using a
    platform-dependent encoding or using the specified encoding if given.

    buffering is an optional integer used to set the buffering policy.
    Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
    line buffering (only usable in text mode), and an integer > 1 to indicate
    the size of a fixed-size chunk buffer. When no buffering argument is
    given, the default buffering policy works as follows:

    * Binary files are buffered in fixed-size chunks; the size of the buffer
      is chosen using a heuristic trying to determine the underlying device's
      "block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
      On many systems, the buffer will typically be 4096 or 8192 bytes long.

    * "Interactive" text files (files for which isatty() returns True)
      use line buffering. Other text files use the policy described above
      for binary files.

    encoding is the name of the encoding used to decode or encode the
    file. This should only be used in text mode. The default encoding is
    platform dependent, but any encoding supported by Python can be
    passed. See the codecs module for the list of supported encodings.

    errors is an optional string that specifies how encoding errors are to
    be handled---this argument should not be used in binary mode. Pass
    'strict' to raise a ValueError exception if there is an encoding error
    (the default of None has the same effect), or pass 'ignore' to ignore
    errors. (Note that ignoring encoding errors can lead to data loss.)
    See the documentation for codecs.register for a list of the permitted
    encoding error strings.

    newline controls how universal newlines works (it only applies to text
    mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
    follows:

    * On input, if newline is None, universal newlines mode is
      enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
      these are translated into '\n' before being returned to the
      caller. If it is '', universal newline mode is enabled, but line
      endings are returned to the caller untranslated. If it has any of
      the other legal values, input lines are only terminated by the given
      string, and the line ending is returned to the caller untranslated.

    * On output, if newline is None, any '\n' characters written are
      translated to the system default line separator, os.linesep. If
      newline is '', no translation takes place. If newline is any of the
      other legal values, any '\n' characters written are translated to
      the given string.

    If closefd is False, the underlying file descriptor will be kept open
    when the file is closed. This does not work when a file name is given
    and must be True in that case.

    open() returns a file object whose type depends on the mode, and
    through which the standard file operations such as reading and writing
    are performed. When open() is used to open a file in a text mode ('w',
    'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
    a file in a binary mode, the returned class varies: in read binary
    mode, it returns a BufferedReader; in write binary and append binary
    modes, it returns a BufferedWriter, and in read/write mode, it returns
    a BufferedRandom.

    It is also possible to use a string or bytearray as a file for both
    reading and writing. For strings StringIO can be used like a file
    opened in a text mode, and for bytes a BytesIO can be used like a file
    opened in a binary mode.
    """
    if not isinstance(file, (str, bytes, int)):
        raise TypeError("invalid file: %r" % file)
    if not isinstance(mode, str):
        raise TypeError("invalid mode: %r" % mode)
    if buffering is not None and not isinstance(buffering, int):
        raise TypeError("invalid buffering: %r" % buffering)
    if encoding is not None and not isinstance(encoding, str):
        raise TypeError("invalid encoding: %r" % encoding)
    if errors is not None and not isinstance(errors, str):
        raise TypeError("invalid errors: %r" % errors)
    modes = set(mode)
    if modes - set("arwb+tU") or len(mode) > len(modes):
        raise ValueError("invalid mode: %r" % mode)
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    updating = "+" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        if writing or appending:
            raise ValueError("can't use U and writing mode at once")
        reading = True
    if text and binary:
        raise ValueError("can't have text and binary mode at once")
    if reading + writing + appending > 1:
        raise ValueError("can't have read/write/append mode at once")
    if not (reading or writing or appending):
        raise ValueError("must have exactly one of read/write/append mode")
    if binary and encoding is not None:
        raise ValueError("binary mode doesn't take an encoding argument")
    if binary and errors is not None:
        raise ValueError("binary mode doesn't take an errors argument")
    if binary and newline is not None:
        raise ValueError("binary mode doesn't take a newline argument")
    raw = FileIO(file,
                 (reading and "r" or "") +
                 (writing and "w" or "") +
                 (appending and "a" or "") +
                 (updating and "+" or ""),
                 closefd)
    if buffering is None:
        buffering = -1
    line_buffering = False
    if buffering == 1 or buffering < 0 and raw.isatty():
        buffering = -1
        line_buffering = True
    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        try:
            bs = os.fstat(raw.fileno()).st_blksize
        except (os.error, AttributeError):
            pass
        else:
            if bs > 1:
                buffering = bs
    if buffering < 0:
        raise ValueError("invalid buffering size")
    if buffering == 0:
        if binary:
            return raw
        raise ValueError("can't have unbuffered text I/O")
    if updating:
        buffer = BufferedRandom(raw, buffering)
    elif writing or appending:
        buffer = BufferedWriter(raw, buffering)
    elif reading:
        buffer = BufferedReader(raw, buffering)
    else:
        raise ValueError("unknown mode: %r" % mode)
    if binary:
        return buffer
    text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
    text.mode = mode
    return text


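# Editor's illustrative sketch (not part of the module): how mode and
# buffering map onto the wrapper classes defined below.  The file name is
# hypothetical.
#
#   with open("example.txt", "w", encoding="utf-8", newline="\n") as f:
#       f.write("spam\n")           # TextIOWrapper over a BufferedWriter
#   with open("example.txt", "rb") as f:
#       data = f.read()             # BufferedReader, returns bytes
#   with open("example.txt", "rb", buffering=0) as f:
#       raw = f.read()              # unbuffered: the FileIO object itself

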
class DocDescriptor:
    """Helper for builtins.open.__doc__
    """
    def __get__(self, obj, typ):
        return (
            "open(file, mode='r', buffering=None, encoding=None, "
            "errors=None, newline=None, closefd=True)\n\n" +
            open.__doc__)

class OpenWrapper:
    """Wrapper for builtins.open

    Trick so that open won't become a bound method when stored
    as a class variable (as dbm.dumb does).

    See initstdio() in Python/pythonrun.c.
    """
    __doc__ = DocDescriptor()

    def __new__(cls, *args, **kwargs):
        return open(*args, **kwargs)


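# Editor's illustrative sketch (not part of the module): why the wrapper
# above is needed.  A plain function stored as a class attribute turns into
# a bound method, so the instance would be passed as `file`; a class stored
# the same way is simply called.  The class name below is hypothetical.
#
#   class _Database:
#       _open1 = open         # _open1(...) passes self as `file` -- wrong
#       _open2 = OpenWrapper  # _open2(...) calls open(...) -- no implicit arg

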
class UnsupportedOperation(ValueError, IOError):
    pass


class IOBase(metaclass=abc.ABCMeta):

    """The abstract base class for all I/O classes, acting on streams of
    bytes. There is no public constructor.

    This class provides dummy implementations for many methods that
    derived classes can override selectively; the default implementations
    represent a file that cannot be read, written or seeked.

    Even though IOBase does not declare read, readinto, or write because
    their signatures will vary, implementations and clients should
    consider those methods part of the interface. Also, implementations
    may raise an IOError when operations they do not support are called.

    The basic type used for binary data read from or written to a file is
    bytes. bytearrays are accepted too, and in some cases (such as
    readinto) needed. Text I/O classes work with str data.

    Note that calling any method (even inquiries) on a closed stream is
    undefined. Implementations may raise IOError in this case.

    IOBase (and its subclasses) support the iterator protocol, meaning
    that an IOBase object can be iterated over yielding the lines in a
    stream.

    IOBase also supports the :keyword:`with` statement. In this example,
    fp is closed after the suite of the with statement is complete:

    with open('spam.txt', 'w') as fp:
        fp.write('Spam and eggs!')
    """

    ### Internal ###

    def _unsupported(self, name: str) -> IOError:
        """Internal: raise an exception for unsupported operations."""
        raise UnsupportedOperation("%s.%s() not supported" %
                                   (self.__class__.__name__, name))

    ### Positioning ###

    def seek(self, pos: int, whence: int = 0) -> int:
        """Change stream position.

        Change the stream position to byte offset offset. offset is
        interpreted relative to the position indicated by whence. Values
        for whence are:

        * 0 -- start of stream (the default); offset should be zero or positive
        * 1 -- current stream position; offset may be negative
        * 2 -- end of stream; offset is usually negative

        Return the new absolute position.
        """
        self._unsupported("seek")

    def tell(self) -> int:
        """Return current stream position."""
        return self.seek(0, 1)

    def truncate(self, pos: int = None) -> int:
        """Truncate file to size bytes.

        Size defaults to the current IO position as reported by tell(). Return
        the new size.
        """
        self._unsupported("truncate")

    ### Flush and close ###

    def flush(self) -> None:
        """Flush write buffers, if applicable.

        This is not implemented for read-only and non-blocking streams.
        """
        self._checkClosed()
        # XXX Should this return the number of bytes written???

    __closed = False

    def close(self) -> None:
        """Flush and close the IO object.

        This method has no effect if the file is already closed.
        """
        if not self.__closed:
            self.flush()
            self.__closed = True

    def __del__(self) -> None:
        """Destructor. Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail. Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass

    ### Inquiries ###

    def seekable(self) -> bool:
        """Return whether object supports random access.

        If False, seek(), tell() and truncate() will raise IOError.
        This method may need to do a test seek().
        """
        return False

    def _checkSeekable(self, msg=None):
        """Internal: raise an IOError if file is not seekable
        """
        if not self.seekable():
            raise IOError("File or stream is not seekable."
                          if msg is None else msg)


    def readable(self) -> bool:
        """Return whether object was opened for reading.

        If False, read() will raise IOError.
        """
        return False

    def _checkReadable(self, msg=None):
        """Internal: raise an IOError if file is not readable
        """
        if not self.readable():
            raise IOError("File or stream is not readable."
                          if msg is None else msg)

    def writable(self) -> bool:
        """Return whether object was opened for writing.

        If False, write() and truncate() will raise IOError.
        """
        return False

    def _checkWritable(self, msg=None):
        """Internal: raise an IOError if file is not writable
        """
        if not self.writable():
            raise IOError("File or stream is not writable."
                          if msg is None else msg)

    @property
    def closed(self):
        """closed: bool. True iff the file has been closed.

        For backwards compatibility, this is a property, not a predicate.
        """
        return self.__closed

    def _checkClosed(self, msg=None):
        """Internal: raise a ValueError if file is closed
        """
        if self.closed:
            raise ValueError("I/O operation on closed file."
                             if msg is None else msg)

    ### Context manager ###

    def __enter__(self) -> "IOBase":  # That's a forward reference
        """Context management protocol. Returns self."""
        self._checkClosed()
        return self

    def __exit__(self, *args) -> None:
        """Context management protocol. Calls close()"""
        self.close()

    ### Lower-level APIs ###

    # XXX Should these be present even if unimplemented?

    def fileno(self) -> int:
        """Returns underlying file descriptor if one exists.

        An IOError is raised if the IO object does not use a file descriptor.
        """
        self._unsupported("fileno")

    def isatty(self) -> bool:
        """Return whether this is an 'interactive' stream.

        Return False if it can't be determined.
        """
        self._checkClosed()
        return False

    ### Readline[s] and writelines ###

    def readline(self, limit: int = -1) -> bytes:
        r"""Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.

        The line terminator is always b'\n' for binary files; for text
        files, the newline argument to open can be used to select the line
        terminator(s) recognized.
        """
        # For backwards compatibility, a (slowish) readline().
        if hasattr(self, "peek"):
            def nreadahead():
                readahead = self.peek(1)
                if not readahead:
                    return 1
                n = (readahead.find(b"\n") + 1) or len(readahead)
                if limit >= 0:
                    n = min(n, limit)
                return n
        else:
            def nreadahead():
                return 1
        if limit is None:
            limit = -1
        elif not isinstance(limit, int):
            raise TypeError("limit must be an integer")
        res = bytearray()
        while limit < 0 or len(res) < limit:
            b = self.read(nreadahead())
            if not b:
                break
            res += b
            if res.endswith(b"\n"):
                break
        return bytes(res)

    def __iter__(self):
        self._checkClosed()
        return self

    def __next__(self):
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def readlines(self, hint=None):
        """Return a list of lines from the stream.

        hint can be specified to control the number of lines read: no more
        lines will be read if the total size (in bytes/characters) of all
        lines so far exceeds hint.
        """
        if hint is None or hint <= 0:
            return list(self)
        n = 0
        lines = []
        for line in self:
            lines.append(line)
            n += len(line)
            if n >= hint:
                break
        return lines

    def writelines(self, lines):
        self._checkClosed()
        for line in lines:
            self.write(line)

io.IOBase.register(IOBase)


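# Editor's illustrative sketch (not part of the module): concrete subclasses
# inherit iteration and the context-manager protocol from IOBase, so the
# usual idiom works for any of them.  The file name and process() are
# hypothetical.
#
#   with open("lines.txt", "r") as fp:   # __enter__/__exit__ from IOBase
#       for line in fp:                  # __iter__/__next__ -> readline()
#           process(line)

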
class RawIOBase(IOBase):

    """Base class for raw binary I/O."""

    # The read() method is implemented by calling readinto(); derived
    # classes that want to support read() only need to implement
    # readinto() as a primitive operation. In general, readinto() can be
    # more efficient than read().

    # (It would be tempting to also provide an implementation of
    # readinto() in terms of read(), in case the latter is a more suitable
    # primitive operation, but that would lead to nasty recursion in case
    # a subclass doesn't implement either.)

    def read(self, n: int = -1) -> bytes:
        """Read and return up to n bytes.

        Returns an empty bytes object on EOF, or None if the object is
        set not to block and has no data to read.
        """
        if n is None:
            n = -1
        if n < 0:
            return self.readall()
        b = bytearray(n.__index__())
        n = self.readinto(b)
        if n is None:
            return None
        del b[n:]
        return bytes(b)

    def readall(self):
        """Read until EOF, using multiple read() calls."""
        res = bytearray()
        while True:
            data = self.read(DEFAULT_BUFFER_SIZE)
            if not data:
                break
            res += data
        return bytes(res)

    def readinto(self, b: bytearray) -> int:
        """Read up to len(b) bytes into b.

        Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
        """
        self._unsupported("readinto")

    def write(self, b: bytes) -> int:
        """Write the given buffer to the IO stream.

        Returns the number of bytes written, which may be less than len(b).
        """
        self._unsupported("write")

io.RawIOBase.register(RawIOBase)
from _io import FileIO
RawIOBase.register(FileIO)


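# Editor's illustrative sketch (not part of the module): a minimal raw stream
# only has to provide readinto() and readable(); read() above is derived from
# readinto().  The class below is hypothetical.
#
#   class ZeroStream(RawIOBase):
#       """Raw stream that yields NUL bytes forever."""
#       def readable(self):
#           return True
#       def readinto(self, b):
#           b[:] = bytes(len(b))   # fill the caller's buffer with zeros
#           return len(b)
#
#   ZeroStream().read(4)   # -> b'\x00\x00\x00\x00', via readinto()

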
class BufferedIOBase(IOBase):

    """Base class for buffered IO objects.

    The main difference with RawIOBase is that the read() method
    supports omitting the size argument, and does not have a default
    implementation that defers to readinto().

    In addition, read(), readinto() and write() may raise
    BlockingIOError if the underlying raw stream is in non-blocking
    mode and not ready; unlike their raw counterparts, they will never
    return None.

    A typical implementation should not inherit from a RawIOBase
    implementation, but wrap one.
    """

    def read(self, n: int = None) -> bytes:
        """Read and return up to n bytes.

        If the argument is omitted, None, or negative, reads and
        returns all data until EOF.

        If the argument is positive, and the underlying raw stream is
        not 'interactive', multiple raw reads may be issued to satisfy
        the byte count (unless EOF is reached first). But for
        interactive raw streams (XXX and for pipes?), at most one raw
        read will be issued, and a short result does not imply that
        EOF is imminent.

        Returns an empty bytes array on EOF.

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        self._unsupported("read")

    def read1(self, n: int=None) -> bytes:
        """Read up to n bytes with at most one read() system call."""
        self._unsupported("read1")

    def readinto(self, b: bytearray) -> int:
        """Read up to len(b) bytes into b.

        Like read(), this may issue multiple reads to the underlying raw
        stream, unless the latter is 'interactive'.

        Returns the number of bytes read (0 for EOF).

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        # XXX This ought to work with anything that supports the buffer API
        data = self.read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            import array
            if not isinstance(b, array.array):
                raise err
            b[:n] = array.array('b', data)
        return n

    def write(self, b: bytes) -> int:
        """Write the given buffer to the IO stream.

        Return the number of bytes written, which is never less than
        len(b).

        Raises BlockingIOError if the buffer is full and the
        underlying raw stream cannot accept more data at the moment.
        """
        self._unsupported("write")

    def detach(self) -> None:
        """
        Separate the underlying raw stream from the buffer and return it.

        After the raw stream has been detached, the buffer is in an unusable
        state.
        """
        self._unsupported("detach")

io.BufferedIOBase.register(BufferedIOBase)


class _BufferedIOMixin(BufferedIOBase):

    """A mixin implementation of BufferedIOBase with an underlying raw stream.

    This passes most requests on to the underlying raw stream. It
    does *not* provide implementations of read(), readinto() or
    write().
    """

    def __init__(self, raw):
        self._raw = raw

    ### Positioning ###

    def seek(self, pos, whence=0):
        new_position = self.raw.seek(pos, whence)
        if new_position < 0:
            raise IOError("seek() returned an invalid position")
        return new_position

    def tell(self):
        pos = self.raw.tell()
        if pos < 0:
            raise IOError("tell() returned an invalid position")
        return pos

    def truncate(self, pos=None):
        # Flush the stream. We're mixing buffered I/O with lower-level I/O,
        # and a flush may be necessary to synch both views of the current
        # file state.
        self.flush()

        if pos is None:
            pos = self.tell()
        # XXX: Should seek() be used, instead of passing the position
        # XXX  directly to truncate?
        return self.raw.truncate(pos)

    ### Flush and close ###

    def flush(self):
        if self.closed:
            raise ValueError("flush of closed file")
        self.raw.flush()

    def close(self):
        if self.raw is not None and not self.closed:
            self.flush()
            self.raw.close()

    def detach(self):
        if self.raw is None:
            raise ValueError("raw stream already detached")
        self.flush()
        raw = self._raw
        self._raw = None
        return raw

    ### Inquiries ###

    def seekable(self):
        return self.raw.seekable()

    def readable(self):
        return self.raw.readable()

    def writable(self):
        return self.raw.writable()

    @property
    def raw(self):
        return self._raw

    @property
    def closed(self):
        return self.raw.closed

    @property
    def name(self):
        return self.raw.name

    @property
    def mode(self):
        return self.raw.mode

    def __repr__(self):
        clsname = self.__class__.__name__
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.{0}>".format(clsname)
        else:
            return "<_pyio.{0} name={1!r}>".format(clsname, name)

    ### Lower-level APIs ###

    def fileno(self):
        return self.raw.fileno()

    def isatty(self):
        return self.raw.isatty()


class BytesIO(BufferedIOBase):

    """Buffered I/O implementation using an in-memory bytes buffer."""

    def __init__(self, initial_bytes=None):
        buf = bytearray()
        if initial_bytes is not None:
            buf += initial_bytes
        self._buffer = buf
        self._pos = 0

    def getvalue(self):
        """Return the bytes value (contents) of the buffer
        """
        if self.closed:
            raise ValueError("getvalue on closed file")
        return bytes(self._buffer)

    def read(self, n=None):
        if self.closed:
            raise ValueError("read from closed file")
        if n is None:
            n = -1
        if n < 0:
            n = len(self._buffer)
        if len(self._buffer) <= self._pos:
            return b""
        newpos = min(len(self._buffer), self._pos + n)
        b = self._buffer[self._pos : newpos]
        self._pos = newpos
        return bytes(b)

    def read1(self, n):
        """This is the same as read.
        """
        return self.read(n)

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        n = len(b)
        if n == 0:
            return 0
        pos = self._pos
        if pos > len(self._buffer):
            # Inserts null bytes between the current end of the file
            # and the new write position.
            padding = b'\x00' * (pos - len(self._buffer))
            self._buffer += padding
        self._buffer[pos:pos + n] = b
        self._pos += n
        return n

    def seek(self, pos, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        try:
            pos = pos.__index__()
        except AttributeError as err:
            raise TypeError("an integer is required") from err
        if whence == 0:
            if pos < 0:
                raise ValueError("negative seek position %r" % (pos,))
            self._pos = pos
        elif whence == 1:
            self._pos = max(0, self._pos + pos)
        elif whence == 2:
            self._pos = max(0, len(self._buffer) + pos)
        else:
            raise ValueError("invalid whence value")
        return self._pos

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file")
        return self._pos

    def truncate(self, pos=None):
        if self.closed:
            raise ValueError("truncate on closed file")
        if pos is None:
            pos = self._pos
        elif pos < 0:
            raise ValueError("negative truncate position %r" % (pos,))
        del self._buffer[pos:]
        return pos

    def readable(self):
        return True

    def writable(self):
        return True

    def seekable(self):
        return True


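# Editor's illustrative sketch (not part of the module): BytesIO seek/write
# semantics as implemented above -- writing past the current end pads with
# NUL bytes, and truncate() does not move the stream position.
#
#   b = BytesIO(b"spam")
#   b.seek(6)
#   b.write(b"!")      # buffer is now b"spam\x00\x00!"
#   b.truncate(4)      # buffer is b"spam", but tell() is still 7

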
class BufferedReader(_BufferedIOMixin):

    """BufferedReader(raw[, buffer_size])

    A buffer for a readable, sequential RawIOBase object.

    The constructor creates a BufferedReader for the given readable raw
    stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
    is used.
    """

    def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
        """Create a new buffered reader using the given readable raw IO object.
        """
        if not raw.readable():
            raise IOError('"raw" argument must be readable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        self.buffer_size = buffer_size
        self._reset_read_buf()
        self._read_lock = Lock()

    def _reset_read_buf(self):
        self._read_buf = b""
        self._read_pos = 0

    def read(self, n=None):
        """Read n bytes.

        Returns exactly n bytes of data unless the underlying raw IO
        stream reaches EOF or if the call would block in non-blocking
        mode. If n is negative, read until EOF or until read() would
        block.
        """
        if n is not None and n < -1:
            raise ValueError("invalid number of bytes to read")
        with self._read_lock:
            return self._read_unlocked(n)

    def _read_unlocked(self, n=None):
        nodata_val = b""
        empty_values = (b"", None)
        buf = self._read_buf
        pos = self._read_pos

        # Special case for when the number of bytes to read is unspecified.
        if n is None or n == -1:
            self._reset_read_buf()
            chunks = [buf[pos:]]  # Strip the consumed bytes.
            current_size = 0
            while True:
                # Read until EOF or until read() would block.
                chunk = self.raw.read()
                if chunk in empty_values:
                    nodata_val = chunk
                    break
                current_size += len(chunk)
                chunks.append(chunk)
            return b"".join(chunks) or nodata_val

        # The number of bytes to read is specified, return at most n bytes.
        avail = len(buf) - pos  # Length of the available buffered data.
        if n <= avail:
            # Fast path: the data to read is fully buffered.
            self._read_pos += n
            return buf[pos:pos+n]
        # Slow path: read from the stream until enough bytes are read,
        # or until an EOF occurs or until read() would block.
        chunks = [buf[pos:]]
        wanted = max(self.buffer_size, n)
        while avail < n:
            chunk = self.raw.read(wanted)
            if chunk in empty_values:
                nodata_val = chunk
                break
            avail += len(chunk)
            chunks.append(chunk)
        # n is more than avail only when an EOF occurred or when
        # read() would have blocked.
        n = min(n, avail)
        out = b"".join(chunks)
        self._read_buf = out[n:]  # Save the extra data in the buffer.
        self._read_pos = 0
        return out[:n] if out else nodata_val

    def peek(self, n=0):
        """Returns buffered bytes without advancing the position.

        The argument indicates a desired minimal number of bytes; we
        do at most one raw read to satisfy it. We never return more
        than self.buffer_size.
        """
        with self._read_lock:
            return self._peek_unlocked(n)

    def _peek_unlocked(self, n=0):
        want = min(n, self.buffer_size)
        have = len(self._read_buf) - self._read_pos
        if have < want or have <= 0:
            to_read = self.buffer_size - have
            current = self.raw.read(to_read)
            if current:
                self._read_buf = self._read_buf[self._read_pos:] + current
                self._read_pos = 0
        return self._read_buf[self._read_pos:]

    def read1(self, n):
        """Reads up to n bytes, with at most one read() system call."""
        # Returns up to n bytes. If at least one byte is buffered, we
        # only return buffered bytes. Otherwise, we do one raw read.
        if n < 0:
            raise ValueError("number of bytes to read must be positive")
        if n == 0:
            return b""
        with self._read_lock:
            self._peek_unlocked(1)
            return self._read_unlocked(
                min(n, len(self._read_buf) - self._read_pos))

    def tell(self):
        return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence value")
        with self._read_lock:
            if whence == 1:
                pos -= len(self._read_buf) - self._read_pos
            pos = _BufferedIOMixin.seek(self, pos, whence)
            self._reset_read_buf()
            return pos

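# Editor's illustrative sketch (not part of the module): how peek() and
# read1() interact with the internal buffer above.  BytesIO stands in for a
# raw stream here purely for illustration; a real raw stream would be a
# FileIO.
#
#   r = BufferedReader(BytesIO(b"abcdef"), buffer_size=4)
#   r.peek(1)    # one raw read fills the buffer; returns it unconsumed
#   r.read1(10)  # at most one raw read: returns what is already buffered
#   r.read(2)    # exact count (unless EOF), refilling the buffer as needed

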
class BufferedWriter(_BufferedIOMixin):

    """A buffer for a writeable sequential RawIO object.

    The constructor creates a BufferedWriter for the given writeable raw
    stream. If the buffer_size is not given, it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 2

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        if not raw.writable():
            raise IOError('"raw" argument must be writable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                          self._warning_stack_offset)
        self.buffer_size = buffer_size
        self._write_buf = bytearray()
        self._write_lock = Lock()

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        with self._write_lock:
            # XXX we can implement some more tricks to try and avoid
            # partial writes
            if len(self._write_buf) > self.buffer_size:
                # We're full, so let's pre-flush the buffer
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    # We can't accept anything else.
                    # XXX Why not just let the exception pass through?
                    raise BlockingIOError(e.errno, e.strerror, 0)
            before = len(self._write_buf)
            self._write_buf.extend(b)
            written = len(self._write_buf) - before
            if len(self._write_buf) > self.buffer_size:
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    if len(self._write_buf) > self.buffer_size:
                        # We've hit the buffer_size. We have to accept a partial
                        # write and cut back our buffer.
                        overage = len(self._write_buf) - self.buffer_size
                        written -= overage
                        self._write_buf = self._write_buf[:self.buffer_size]
                    raise BlockingIOError(e.errno, e.strerror, written)
            return written

    def truncate(self, pos=None):
        with self._write_lock:
            self._flush_unlocked()
            if pos is None:
                pos = self.raw.tell()
            return self.raw.truncate(pos)

    def flush(self):
        with self._write_lock:
            self._flush_unlocked()

    def _flush_unlocked(self):
        if self.closed:
            raise ValueError("flush of closed file")
        written = 0
        try:
            while self._write_buf:
                n = self.raw.write(self._write_buf)
                if n > len(self._write_buf) or n < 0:
                    raise IOError("write() returned incorrect number of bytes")
                del self._write_buf[:n]
                written += n
        except BlockingIOError as e:
            n = e.characters_written
            del self._write_buf[:n]
            written += n
            raise BlockingIOError(e.errno, e.strerror, written)

    def tell(self):
        return _BufferedIOMixin.tell(self) + len(self._write_buf)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        with self._write_lock:
            self._flush_unlocked()
            return _BufferedIOMixin.seek(self, pos, whence)


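# Editor's illustrative sketch (not part of the module): how a caller might
# use the characters_written attribute that _flush_unlocked() above reports
# when the raw stream is non-blocking.  `buffered` and `data` are
# hypothetical names.
#
#   try:
#       buffered.write(data)
#   except BlockingIOError as e:
#       data = data[e.characters_written:]   # retry the remainder later

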
class BufferedRWPair(BufferedIOBase):

    """A buffered reader and writer object together.

    A buffered reader object and buffered writer object put together to
    form a sequential IO object that can read and write. This is typically
    used with a socket or two-way pipe.

    reader and writer are RawIOBase objects that are readable and
    writeable respectively. If the buffer_size is omitted it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    # XXX The usefulness of this (compared to having two separate IO
    # objects) is questionable.

    def __init__(self, reader, writer,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        """Constructor.

        The arguments are two RawIO instances.
        """
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)

        if not reader.readable():
            raise IOError('"reader" argument must be readable.')

        if not writer.writable():
            raise IOError('"writer" argument must be writable.')

        self.reader = BufferedReader(reader, buffer_size)
        self.writer = BufferedWriter(writer, buffer_size)

    def read(self, n=None):
        if n is None:
            n = -1
        return self.reader.read(n)

    def readinto(self, b):
        return self.reader.readinto(b)

    def write(self, b):
        return self.writer.write(b)

    def peek(self, n=0):
        return self.reader.peek(n)

    def read1(self, n):
        return self.reader.read1(n)

    def readable(self):
        return self.reader.readable()

    def writable(self):
        return self.writer.writable()

    def flush(self):
        return self.writer.flush()

    def close(self):
        self.writer.close()
        self.reader.close()

    def isatty(self):
        return self.reader.isatty() or self.writer.isatty()

    @property
    def closed(self):
        return self.writer.closed


class BufferedRandom(BufferedWriter, BufferedReader):

    """A buffered interface to random access streams.

    The constructor creates a reader and writer for a seekable stream,
    raw, given in the first argument. If the buffer_size is omitted it
    defaults to DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 3

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        raw._checkSeekable()
        BufferedReader.__init__(self, raw, buffer_size)
        BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        self.flush()
        if self._read_buf:
            # Undo read ahead.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
        # First do the raw seek, then empty the read buffer, so that
        # if the raw seek fails, we don't lose buffered data forever.
        pos = self.raw.seek(pos, whence)
        with self._read_lock:
            self._reset_read_buf()
        if pos < 0:
            raise IOError("seek() returned invalid position")
        return pos

    def tell(self):
        if self._write_buf:
            return BufferedWriter.tell(self)
        else:
            return BufferedReader.tell(self)

    def truncate(self, pos=None):
        if pos is None:
            pos = self.tell()
        # Use seek to flush the read buffer.
        return BufferedWriter.truncate(self, pos)

    def read(self, n=None):
        if n is None:
            n = -1
        self.flush()
        return BufferedReader.read(self, n)

    def readinto(self, b):
        self.flush()
        return BufferedReader.readinto(self, b)

    def peek(self, n=0):
        self.flush()
        return BufferedReader.peek(self, n)

    def read1(self, n):
        self.flush()
        return BufferedReader.read1(self, n)

    def write(self, b):
        if self._read_buf:
            # Undo readahead
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
                self._reset_read_buf()
        return BufferedWriter.write(self, b)


class TextIOBase(IOBase):

    """Base class for text I/O.

    This class provides a character and line based interface to stream
    I/O. There is no readinto method because Python's character strings
    are immutable. There is no public constructor.
    """

    def read(self, n: int = -1) -> str:
        """Read at most n characters from stream.

        Read from underlying buffer until we have n characters or we hit EOF.
        If n is negative or omitted, read until EOF.
        """
        self._unsupported("read")

    def write(self, s: str) -> int:
        """Write string s to stream."""
        self._unsupported("write")

    def truncate(self, pos: int = None) -> int:
        """Truncate size to pos."""
        self._unsupported("truncate")

    def readline(self) -> str:
        """Read until newline or EOF.

        Returns an empty string if EOF is hit immediately.
        """
        self._unsupported("readline")

    def detach(self) -> None:
        """
        Separate the underlying buffer from the TextIOBase and return it.

        After the underlying buffer has been detached, the TextIO is in an
        unusable state.
        """
        self._unsupported("detach")

    @property
    def encoding(self):
        """Subclasses should override."""
        return None

    @property
    def newlines(self):
        """Line endings translated so far.

        Only line endings translated during reading are considered.

        Subclasses should override.
        """
        return None

    @property
    def errors(self):
        """Error setting of the decoder or encoder.

        Subclasses should override."""
        return None

io.TextIOBase.register(TextIOBase)


class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
    r"""Codec used when reading a file in universal newlines mode. It wraps
    another incremental decoder, translating \r\n and \r into \n. It also
    records the types of newlines encountered. When used with
    translate=False, it ensures that the newline sequence is returned in
    one piece.
    """
    def __init__(self, decoder, translate, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors=errors)
        self.translate = translate
        self.decoder = decoder
        self.seennl = 0
        self.pendingcr = False

    def decode(self, input, final=False):
        # decode input (with the eventual \r from a previous pass)
        if self.decoder is None:
            output = input
        else:
            output = self.decoder.decode(input, final=final)
        if self.pendingcr and (output or final):
            output = "\r" + output
            self.pendingcr = False

        # retain last \r even when not translating data:
        # then readline() is sure to get \r\n in one pass
        if output.endswith("\r") and not final:
            output = output[:-1]
            self.pendingcr = True

        # Record which newlines are read
        crlf = output.count('\r\n')
        cr = output.count('\r') - crlf
        lf = output.count('\n') - crlf
        self.seennl |= (lf and self._LF) | (cr and self._CR) \
                    | (crlf and self._CRLF)

        if self.translate:
            if crlf:
                output = output.replace("\r\n", "\n")
            if cr:
                output = output.replace("\r", "\n")

        return output

    def getstate(self):
        if self.decoder is None:
            buf = b""
            flag = 0
        else:
            buf, flag = self.decoder.getstate()
        flag <<= 1
        if self.pendingcr:
            flag |= 1
        return buf, flag

    def setstate(self, state):
        buf, flag = state
        self.pendingcr = bool(flag & 1)
        if self.decoder is not None:
            self.decoder.setstate((buf, flag >> 1))

    def reset(self):
        self.seennl = 0
        self.pendingcr = False
        if self.decoder is not None:
            self.decoder.reset()

    _LF = 1
    _CR = 2
    _CRLF = 4

    @property
    def newlines(self):
        return (None,
                "\n",
                "\r",
                ("\r", "\n"),
                "\r\n",
                ("\n", "\r\n"),
                ("\r", "\r\n"),
                ("\r", "\n", "\r\n")
               )[self.seennl]


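# Editor's illustrative sketch (not part of the module): the decoder above
# used on its own, without an inner decoder, translating newlines and
# recording which kinds it saw.
#
#   d = IncrementalNewlineDecoder(None, translate=True)
#   d.decode("a\r\nb\r")      # -> "a\nb"  (trailing "\r" is held back)
#   d.decode("", final=True)  # -> "\n"    (pending "\r" flushed, translated)
#   d.newlines                # -> ("\r", "\r\n")

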
class TextIOWrapper(TextIOBase):

    r"""Character and line based layer over a BufferedIOBase object, buffer.

    encoding gives the name of the encoding that the stream will be
    decoded or encoded with. It defaults to locale.getpreferredencoding.

    errors determines the strictness of encoding and decoding (see the
    codecs.register) and defaults to "strict".

    newline can be None, '', '\n', '\r', or '\r\n'. It controls the
    handling of line endings. If it is None, universal newlines is
    enabled. With this enabled, on input, the line endings '\n', '\r',
    or '\r\n' are translated to '\n' before being returned to the
    caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any other of its
    legal values, that newline becomes the newline when the file is read
    and it is returned untranslated. On output, '\n' is converted to the
    newline.

    If line_buffering is True, a call to flush is implied when a call to
    write contains a newline character.
    """

    _CHUNK_SIZE = 2048

    def __init__(self, buffer, encoding=None, errors=None, newline=None,
                 line_buffering=False):
        if newline is not None and not isinstance(newline, str):
            raise TypeError("illegal newline type: %r" % (type(newline),))
        if newline not in (None, "", "\n", "\r", "\r\n"):
            raise ValueError("illegal newline value: %r" % (newline,))
        if encoding is None:
            try:
                encoding = os.device_encoding(buffer.fileno())
            except (AttributeError, UnsupportedOperation):
                pass
            if encoding is None:
                try:
                    import locale
                except ImportError:
                    # Importing locale may fail if Python is being built
                    encoding = "ascii"
                else:
                    encoding = locale.getpreferredencoding()

        if not isinstance(encoding, str):
            raise ValueError("invalid encoding: %r" % encoding)

        if errors is None:
            errors = "strict"
        else:
            if not isinstance(errors, str):
                raise ValueError("invalid errors: %r" % errors)

        self._buffer = buffer
        self._line_buffering = line_buffering
        self._encoding = encoding
        self._errors = errors
        self._readuniversal = not newline
        self._readtranslate = newline is None
        self._readnl = newline
        self._writetranslate = newline != ''
        self._writenl = newline or os.linesep
        self._encoder = None
        self._decoder = None
        self._decoded_chars = ''  # buffer for text returned from decoder
        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
        self._snapshot = None  # info for reconstructing decoder state
        self._seekable = self._telling = self.buffer.seekable()

        if self._seekable and self.writable():
            position = self.buffer.tell()
            if position != 0:
                try:
                    self._get_encoder().setstate(0)
                except LookupError:
                    # Sometimes the encoder doesn't exist
                    pass

    # self._snapshot is either None, or a tuple (dec_flags, next_input)
    # where dec_flags is the second (integer) item of the decoder state
    # and next_input is the chunk of input bytes that comes next after the
    # snapshot point. We use this to reconstruct decoder states in tell().

    # Naming convention:
    #   - "bytes_..." for integer variables that count input bytes
    #   - "chars_..." for integer variables that count decoded characters

    def __repr__(self):
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.TextIOWrapper encoding={0!r}>".format(self.encoding)
        else:
            return "<_pyio.TextIOWrapper name={0!r} encoding={1!r}>".format(
                name, self.encoding)

    @property
    def encoding(self):
        return self._encoding

    @property
    def errors(self):
        return self._errors

    @property
    def line_buffering(self):
        return self._line_buffering

    @property
    def buffer(self):
        return self._buffer

    def seekable(self):
        return self._seekable

    def readable(self):
        return self.buffer.readable()

    def writable(self):
        return self.buffer.writable()

    def flush(self):
        self.buffer.flush()
        self._telling = self._seekable

    def close(self):
        if self.buffer is not None and not self.closed:
            self.flush()
            self.buffer.close()

    @property
    def closed(self):
        return self.buffer.closed

    @property
    def name(self):
        return self.buffer.name

    def fileno(self):
        return self.buffer.fileno()

    def isatty(self):
        return self.buffer.isatty()

    def write(self, s: str):
        if self.closed:
            raise ValueError("write to closed file")
        if not isinstance(s, str):
            raise TypeError("can't write %s to text stream" %
                            s.__class__.__name__)
        length = len(s)
        haslf = (self._writetranslate or self._line_buffering) and "\n" in s
        if haslf and self._writetranslate and self._writenl != "\n":
            s = s.replace("\n", self._writenl)
        encoder = self._encoder or self._get_encoder()
        # XXX What if we were just reading?
        b = encoder.encode(s)
        self.buffer.write(b)
        if self._line_buffering and (haslf or "\r" in s):
            self.flush()
        self._snapshot = None
        if self._decoder:
            self._decoder.reset()
        return length

    def _get_encoder(self):
        make_encoder = codecs.getincrementalencoder(self._encoding)
        self._encoder = make_encoder(self._errors)
        return self._encoder

    def _get_decoder(self):
        make_decoder = codecs.getincrementaldecoder(self._encoding)
        decoder = make_decoder(self._errors)
        if self._readuniversal:
            decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
        self._decoder = decoder
        return decoder

    # The following three methods implement an ADT for _decoded_chars.
    # Text returned from the decoder is buffered here until the client
    # requests it by calling our read() or readline() method.
    def _set_decoded_chars(self, chars):
        """Set the _decoded_chars buffer."""
        self._decoded_chars = chars
        self._decoded_chars_used = 0

    def _get_decoded_chars(self, n=None):
        """Advance into the _decoded_chars buffer."""
        offset = self._decoded_chars_used
        if n is None:
            chars = self._decoded_chars[offset:]
        else:
            chars = self._decoded_chars[offset:offset + n]
        self._decoded_chars_used += len(chars)
        return chars

    def _rewind_decoded_chars(self, n):
        """Rewind the _decoded_chars buffer."""
        if self._decoded_chars_used < n:
            raise AssertionError("rewind decoded_chars out of bounds")
        self._decoded_chars_used -= n

    def _read_chunk(self):
        """
        Read and decode the next chunk of data from the BufferedReader.
        """

        # The return value is True unless EOF was reached. The decoded
        # string is placed in self._decoded_chars (replacing its previous
        # value). The entire input chunk is sent to the decoder, though
        # some of it may remain buffered in the decoder, yet to be
        # converted.

        if self._decoder is None:
            raise ValueError("no decoder")

        if self._telling:
            # To prepare for tell(), we need to snapshot a point in the
            # file where the decoder's input buffer is empty.

            dec_buffer, dec_flags = self._decoder.getstate()
            # Given this, we know there was a valid snapshot point
            # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).

        # Read a chunk, decode it, and put the result in self._decoded_chars.
        input_chunk = self.buffer.read1(self._CHUNK_SIZE)
        eof = not input_chunk
        self._set_decoded_chars(self._decoder.decode(input_chunk, eof))

        if self._telling:
            # At the snapshot point, len(dec_buffer) bytes before the read,
            # the next input to be decoded is dec_buffer + input_chunk.
            self._snapshot = (dec_flags, dec_buffer + input_chunk)

        return not eof

    def _pack_cookie(self, position, dec_flags=0,
                     bytes_to_feed=0, need_eof=0, chars_to_skip=0):
        # The meaning of a tell() cookie is: seek to position, set the
        # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
        # into the decoder with need_eof as the EOF flag, then skip
        # chars_to_skip characters of the decoded result. For most simple
        # decoders, tell() will often just give a byte offset in the file.
        return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
               (chars_to_skip<<192) | bool(need_eof)<<256)

    def _unpack_cookie(self, bigint):
        rest, position = divmod(bigint, 1<<64)
        rest, dec_flags = divmod(rest, 1<<64)
        rest, bytes_to_feed = divmod(rest, 1<<64)
        need_eof, chars_to_skip = divmod(rest, 1<<64)
        return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip

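    # Editor's illustrative worked example (not part of the module): with
    # position=10 and dec_flags=1, _pack_cookie() returns 10 | (1 << 64),
    # i.e. 18446744073709551626; _unpack_cookie() splits that back with
    # repeated divmod(..., 1 << 64) into position=10, dec_flags=1 and zeros
    # for the remaining fields.
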
1651 def tell(self):
1652 if not self._seekable:
1653 raise IOError("underlying stream is not seekable")
1654 if not self._telling:
1655 raise IOError("telling position disabled by next() call")
1656 self.flush()
1657 position = self.buffer.tell()
1658 decoder = self._decoder
1659 if decoder is None or self._snapshot is None:
1660 if self._decoded_chars:
1661 # This should never happen.
1662 raise AssertionError("pending decoded text")
1663 return position
1664
1665 # Skip backward to the snapshot point (see _read_chunk).
1666 dec_flags, next_input = self._snapshot
1667 position -= len(next_input)
1668
1669 # How many decoded characters have been used up since the snapshot?
1670 chars_to_skip = self._decoded_chars_used
1671 if chars_to_skip == 0:
1672 # We haven't moved from the snapshot point.
1673 return self._pack_cookie(position, dec_flags)
1674
1675 # Starting from the snapshot position, we will walk the decoder
1676 # forward until it gives us enough decoded characters.
1677 saved_state = decoder.getstate()
1678 try:
1679 # Note our initial start point.
1680 decoder.setstate((b'', dec_flags))
1681 start_pos = position
1682 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1683 need_eof = 0
1684
1685 # Feed the decoder one byte at a time. As we go, note the
1686 # nearest "safe start point" before the current location
1687 # (a point where the decoder has nothing buffered, so seek()
1688 # can safely start from there and advance to this location).
1689 next_byte = bytearray(1)
1690 for next_byte[0] in next_input:
1691 bytes_fed += 1
1692 chars_decoded += len(decoder.decode(next_byte))
1693 dec_buffer, dec_flags = decoder.getstate()
1694 if not dec_buffer and chars_decoded <= chars_to_skip:
1695 # Decoder buffer is empty, so this is a safe start point.
1696 start_pos += bytes_fed
1697 chars_to_skip -= chars_decoded
1698 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1699 if chars_decoded >= chars_to_skip:
1700 break
1701 else:
1702 # We didn't get enough decoded data; signal EOF to get more.
1703 chars_decoded += len(decoder.decode(b'', final=True))
1704 need_eof = 1
1705 if chars_decoded < chars_to_skip:
1706 raise IOError("can't reconstruct logical file position")
1707
1708 # The returned cookie corresponds to the last safe start point.
1709 return self._pack_cookie(
1710 start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
1711 finally:
1712 decoder.setstate(saved_state)
1713
1714 def truncate(self, pos=None):
1715 self.flush()
1716 if pos is None:
1717 pos = self.tell()
Antoine Pitrou66f9fea2010-01-31 23:20:26 +00001718 return self.buffer.truncate(pos)
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001719
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001720 def detach(self):
1721 if self.buffer is None:
1722 raise ValueError("buffer is already detached")
1723 self.flush()
Antoine Pitrou6cfc5122010-12-21 21:26:09 +00001724 buffer = self._buffer
1725 self._buffer = None
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001726 return buffer
1727
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001728 def seek(self, cookie, whence=0):
1729 if self.closed:
1730 raise ValueError("tell on closed file")
1731 if not self._seekable:
1732 raise IOError("underlying stream is not seekable")
1733 if whence == 1: # seek relative to current position
1734 if cookie != 0:
1735 raise IOError("can't do nonzero cur-relative seeks")
1736 # Seeking to the current position should attempt to
1737 # sync the underlying buffer with the current position.
1738 whence = 0
1739 cookie = self.tell()
1740 if whence == 2: # seek relative to end of file
1741 if cookie != 0:
1742 raise IOError("can't do nonzero end-relative seeks")
1743 self.flush()
1744 position = self.buffer.seek(0, 2)
1745 self._set_decoded_chars('')
1746 self._snapshot = None
1747 if self._decoder:
1748 self._decoder.reset()
1749 return position
1750 if whence != 0:
1751 raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
1752 (whence,))
1753 if cookie < 0:
1754 raise ValueError("negative seek position %r" % (cookie,))
1755 self.flush()
1756
1757 # The strategy of seek() is to go back to the safe start point
1758 # and replay the effect of read(chars_to_skip) from there.
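# Editorial sketch of the intended round trip (file name and contents
# are illustrative only, not part of the original module):
#
#   with open('example.txt', encoding='utf-8') as f:
#       f.readline()
#       pos = f.tell()     # opaque cookie, not necessarily a byte offset
#       f.readline()
#       f.seek(pos)        # rewinds to the start of the second line
#       f.readline()       # re-reads the second line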
1759 start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
1760 self._unpack_cookie(cookie)
1761
1762 # Seek back to the safe start point.
1763 self.buffer.seek(start_pos)
1764 self._set_decoded_chars('')
1765 self._snapshot = None
1766
1767 # Restore the decoder to its state from the safe start point.
Benjamin Peterson9363a652009-03-05 00:42:09 +00001768 if cookie == 0 and self._decoder:
1769 self._decoder.reset()
1770 elif self._decoder or dec_flags or chars_to_skip:
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001771 self._decoder = self._decoder or self._get_decoder()
1772 self._decoder.setstate((b'', dec_flags))
1773 self._snapshot = (dec_flags, b'')
1774
1775 if chars_to_skip:
1776 # Just like _read_chunk, feed the decoder and save a snapshot.
1777 input_chunk = self.buffer.read(bytes_to_feed)
1778 self._set_decoded_chars(
1779 self._decoder.decode(input_chunk, need_eof))
1780 self._snapshot = (dec_flags, input_chunk)
1781
1782 # Skip chars_to_skip of the decoded characters.
1783 if len(self._decoded_chars) < chars_to_skip:
1784 raise IOError("can't restore logical file position")
1785 self._decoded_chars_used = chars_to_skip
1786
Antoine Pitroue4501852009-05-14 18:55:55 +00001787 # Finally, reset the encoder (only needed for proper BOM handling)
1788 try:
1789 encoder = self._encoder or self._get_encoder()
1790 except LookupError:
1791 # The codec may not provide an incremental encoder
1792 pass
1793 else:
1794 if cookie != 0:
1795 encoder.setstate(0)
1796 else:
1797 encoder.reset()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001798 return cookie
1799
1800 def read(self, n=None):
Benjamin Petersona1b49012009-03-31 23:11:32 +00001801 self._checkReadable()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001802 if n is None:
1803 n = -1
1804 decoder = self._decoder or self._get_decoder()
1805 if n < 0:
1806 # Read everything.
1807 result = (self._get_decoded_chars() +
1808 decoder.decode(self.buffer.read(), final=True))
1809 self._set_decoded_chars('')
1810 self._snapshot = None
1811 return result
1812 else:
1813 # Keep reading chunks until we have n characters to return.
1814 eof = False
1815 result = self._get_decoded_chars(n)
1816 while len(result) < n and not eof:
1817 eof = not self._read_chunk()
1818 result += self._get_decoded_chars(n - len(result))
1819 return result
1820
1821 def __next__(self):
1822 self._telling = False
1823 line = self.readline()
1824 if not line:
1825 self._snapshot = None
1826 self._telling = self._seekable
1827 raise StopIteration
1828 return line
1829
1830 def readline(self, limit=None):
1831 if self.closed:
1832 raise ValueError("read from closed file")
1833 if limit is None:
1834 limit = -1
Benjamin Petersonb01138a2009-04-24 22:59:52 +00001835 elif not isinstance(limit, int):
1836 raise TypeError("limit must be an integer")
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001837
1838 # Grab all the decoded text (we will rewind any extra bits later).
1839 line = self._get_decoded_chars()
1840
1841 start = 0
1842 # Make the decoder if it doesn't already exist.
1843 if not self._decoder:
1844 self._get_decoder()
1845
1846 pos = endpos = None
1847 while True:
1848 if self._readtranslate:
1849 # Newlines are already translated, so only search for \n
1850 pos = line.find('\n', start)
1851 if pos >= 0:
1852 endpos = pos + 1
1853 break
1854 else:
1855 start = len(line)
1856
1857 elif self._readuniversal:
1858 # Universal newline search. Find any of \r, \r\n, \n
1859 # The decoder ensures that \r\n are not split in two pieces
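# Editorial sketch (not part of the original module; assumes the bytes
# decode cleanly, e.g. ASCII): with newline='' nothing is translated,
# so each of \r, \r\n and \n ends a line here:
#
#   t = TextIOWrapper(BytesIO(b'a\rb\r\nc\n'), encoding='ascii', newline='')
#   t.readline()   # -> 'a\r'
#   t.readline()   # -> 'b\r\n'
#   t.readline()   # -> 'c\n'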
1860
1861 # In C we'd look for these in parallel of course.
1862 nlpos = line.find("\n", start)
1863 crpos = line.find("\r", start)
1864 if crpos == -1:
1865 if nlpos == -1:
1866 # Nothing found
1867 start = len(line)
1868 else:
1869 # Found \n
1870 endpos = nlpos + 1
1871 break
1872 elif nlpos == -1:
1873 # Found lone \r
1874 endpos = crpos + 1
1875 break
1876 elif nlpos < crpos:
1877 # Found \n
1878 endpos = nlpos + 1
1879 break
1880 elif nlpos == crpos + 1:
1881 # Found \r\n
1882 endpos = crpos + 2
1883 break
1884 else:
1885 # Found \r
1886 endpos = crpos + 1
1887 break
1888 else:
1889 # non-universal
1890 pos = line.find(self._readnl)
1891 if pos >= 0:
1892 endpos = pos + len(self._readnl)
1893 break
1894
1895 if limit >= 0 and len(line) >= limit:
1896 endpos = limit # reached length limit
1897 break
1898
1899 # No line ending seen yet - get more data
1900 while self._read_chunk():
1901 if self._decoded_chars:
1902 break
1903 if self._decoded_chars:
1904 line += self._get_decoded_chars()
1905 else:
1906 # end of file
1907 self._set_decoded_chars('')
1908 self._snapshot = None
1909 return line
1910
1911 if limit >= 0 and endpos > limit:
1912 endpos = limit # don't exceed limit
1913
1914 # Rewind _decoded_chars to just after the line ending we found.
1915 self._rewind_decoded_chars(len(line) - endpos)
1916 return line[:endpos]
1917
1918 @property
1919 def newlines(self):
1920 return self._decoder.newlines if self._decoder else None
1921
1922
1923class StringIO(TextIOWrapper):
1924 """Text I/O implementation using an in-memory buffer.
1925
1926 The initial_value argument sets the value of the object. The newline
1927 argument works like that of TextIOWrapper's constructor.
1928 """
1929
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001930 def __init__(self, initial_value="", newline="\n"):
1931 super(StringIO, self).__init__(BytesIO(),
1932 encoding="utf-8",
1933 errors="strict",
1934 newline=newline)
Antoine Pitrou11446482009-04-04 14:09:30 +00001935 # Issue #5645: make universal newline semantics the same as in the
1936 # C version, even under Windows.
1937 if newline is None:
1938 self._writetranslate = False
Georg Brandl194da4a2009-08-13 09:34:05 +00001939 if initial_value is not None:
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001940 if not isinstance(initial_value, str):
Georg Brandl194da4a2009-08-13 09:34:05 +00001941 raise TypeError("initial_value must be str or None, not {0}"
1942 .format(type(initial_value).__name__))
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001943 initial_value = str(initial_value)
1944 self.write(initial_value)
1945 self.seek(0)
1946
1947 def getvalue(self):
1948 self.flush()
1949 return self.buffer.getvalue().decode(self._encoding, self._errors)
Benjamin Peterson9fd459a2009-03-09 00:09:44 +00001950
1951 def __repr__(self):
1952 # TextIOWrapper reports its encoding in its repr. In StringIO,
1953 # that's an implementation detail.
1954 return object.__repr__(self)
Benjamin Petersonb487e632009-03-21 03:08:31 +00001955
1956 @property
Benjamin Peterson0926ad12009-06-06 18:02:12 +00001957 def errors(self):
1958 return None
1959
1960 @property
Benjamin Petersonb487e632009-03-21 03:08:31 +00001961 def encoding(self):
1962 return None
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001963
1964 def detach(self):
1965 # This doesn't make sense on StringIO.
1966 self._unsupported("detach")