"""
Python implementation of the io module.
"""

import os
import abc
import codecs
import warnings
# Import _thread instead of threading to reduce startup cost
try:
    from _thread import allocate_lock as Lock
except ImportError:
    from _dummy_thread import allocate_lock as Lock

import io
from io import __all__
from io import SEEK_SET, SEEK_CUR, SEEK_END

# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes

# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.


class BlockingIOError(IOError):

    """Exception raised when I/O would block on a non-blocking I/O stream."""

    def __init__(self, errno, strerror, characters_written=0):
        super().__init__(errno, strerror)
        if not isinstance(characters_written, int):
            raise TypeError("characters_written must be an integer")
        self.characters_written = characters_written


def open(file: (str, bytes), mode: str = "r", buffering: int = None,
         encoding: str = None, errors: str = None,
         newline: str = None, closefd: bool = True) -> "IOBase":

    r"""Open file and return a stream. Raise IOError upon failure.

    file is either a text or byte string giving the name (and the path
    if the file isn't in the current working directory) of the file to
    be opened or an integer file descriptor of the file to be
    wrapped. (If a file descriptor is given, it is closed when the
    returned I/O object is closed, unless closefd is set to False.)

    mode is an optional string that specifies the mode in which the file
    is opened. It defaults to 'r' which means open for reading in text
    mode. Other common values are 'w' for writing (truncating the file if
    it already exists), and 'a' for appending (which on some Unix systems,
    means that all writes append to the end of the file regardless of the
    current seek position). In text mode, if encoding is not specified the
    encoding used is platform dependent. (For reading and writing raw
    bytes use binary mode and leave encoding unspecified.) The available
    modes are:

    ========= ===============================================================
    Character Meaning
    --------- ---------------------------------------------------------------
    'r'       open for reading (default)
    'w'       open for writing, truncating the file first
    'a'       open for writing, appending to the end of the file if it exists
    'b'       binary mode
    't'       text mode (default)
    '+'       open a disk file for updating (reading and writing)
    'U'       universal newline mode (for backwards compatibility; unneeded
              for new code)
    ========= ===============================================================

    The default mode is 'rt' (open for reading text). For binary random
    access, the mode 'w+b' opens and truncates the file to 0 bytes, while
    'r+b' opens the file without truncation.

    Python distinguishes between files opened in binary and text modes,
    even when the underlying operating system doesn't. Files opened in
    binary mode (appending 'b' to the mode argument) return contents as
    bytes objects without any decoding. In text mode (the default, or when
    't' is appended to the mode argument), the contents of the file are
    returned as strings, the bytes having been first decoded using a
    platform-dependent encoding or using the specified encoding if given.

    buffering is an optional integer used to set the buffering policy.
    Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
    line buffering (only usable in text mode), and an integer > 1 to indicate
    the size of a fixed-size chunk buffer. When no buffering argument is
    given, the default buffering policy works as follows:

    * Binary files are buffered in fixed-size chunks; the size of the buffer
      is chosen using a heuristic trying to determine the underlying device's
      "block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
      On many systems, the buffer will typically be 4096 or 8192 bytes long.

    * "Interactive" text files (files for which isatty() returns True)
      use line buffering. Other text files use the policy described above
      for binary files.

    encoding is the name of the encoding used to decode or encode the
    file. This should only be used in text mode. The default encoding is
    platform dependent, but any encoding supported by Python can be
    passed. See the codecs module for the list of supported encodings.

    errors is an optional string that specifies how encoding errors are to
    be handled---this argument should not be used in binary mode. Pass
    'strict' to raise a ValueError exception if there is an encoding error
    (the default of None has the same effect), or pass 'ignore' to ignore
    errors. (Note that ignoring encoding errors can lead to data loss.)
    See the documentation for codecs.register for a list of the permitted
    encoding error strings.

    newline controls how universal newlines works (it only applies to text
    mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
    follows:

    * On input, if newline is None, universal newlines mode is
      enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
      these are translated into '\n' before being returned to the
      caller. If it is '', universal newline mode is enabled, but line
      endings are returned to the caller untranslated. If it has any of
      the other legal values, input lines are only terminated by the given
      string, and the line ending is returned to the caller untranslated.

    * On output, if newline is None, any '\n' characters written are
      translated to the system default line separator, os.linesep. If
      newline is '', no translation takes place. If newline is any of the
      other legal values, any '\n' characters written are translated to
      the given string.

    If closefd is False, the underlying file descriptor will be kept open
    when the file is closed. This does not work when a file name is given
    and must be True in that case.

    open() returns a file object whose type depends on the mode, and
    through which the standard file operations such as reading and writing
    are performed. When open() is used to open a file in a text mode ('w',
    'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
    a file in a binary mode, the returned class varies: in read binary
    mode, it returns a BufferedReader; in write binary and append binary
    modes, it returns a BufferedWriter, and in read/write mode, it returns
    a BufferedRandom.

    It is also possible to use a string or bytearray as a file for both
    reading and writing. For strings StringIO can be used like a file
    opened in a text mode, and for bytes a BytesIO can be used like a file
    opened in a binary mode.
    """
    if not isinstance(file, (str, bytes, int)):
        raise TypeError("invalid file: %r" % file)
    if not isinstance(mode, str):
        raise TypeError("invalid mode: %r" % mode)
    if buffering is not None and not isinstance(buffering, int):
        raise TypeError("invalid buffering: %r" % buffering)
    if encoding is not None and not isinstance(encoding, str):
        raise TypeError("invalid encoding: %r" % encoding)
    if errors is not None and not isinstance(errors, str):
        raise TypeError("invalid errors: %r" % errors)
    modes = set(mode)
    if modes - set("arwb+tU") or len(mode) > len(modes):
        raise ValueError("invalid mode: %r" % mode)
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    updating = "+" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        if writing or appending:
            raise ValueError("can't use U and writing mode at once")
        reading = True
    if text and binary:
        raise ValueError("can't have text and binary mode at once")
    if reading + writing + appending > 1:
        raise ValueError("can't have read/write/append mode at once")
    if not (reading or writing or appending):
        raise ValueError("must have exactly one of read/write/append mode")
    if binary and encoding is not None:
        raise ValueError("binary mode doesn't take an encoding argument")
    if binary and errors is not None:
        raise ValueError("binary mode doesn't take an errors argument")
    if binary and newline is not None:
        raise ValueError("binary mode doesn't take a newline argument")
    raw = FileIO(file,
                 (reading and "r" or "") +
                 (writing and "w" or "") +
                 (appending and "a" or "") +
                 (updating and "+" or ""),
                 closefd)
    if buffering is None:
        buffering = -1
    line_buffering = False
    if buffering == 1 or buffering < 0 and raw.isatty():
        buffering = -1
        line_buffering = True
    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        try:
            bs = os.fstat(raw.fileno()).st_blksize
        except (os.error, AttributeError):
            pass
        else:
            if bs > 1:
                buffering = bs
    if buffering < 0:
        raise ValueError("invalid buffering size")
    if buffering == 0:
        if binary:
            return raw
        raise ValueError("can't have unbuffered text I/O")
    if updating:
        buffer = BufferedRandom(raw, buffering)
    elif writing or appending:
        buffer = BufferedWriter(raw, buffering)
    elif reading:
        buffer = BufferedReader(raw, buffering)
    else:
        raise ValueError("unknown mode: %r" % mode)
    if binary:
        return buffer
    text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
    text.mode = mode
    return text
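
# Minimal usage sketch of the open() wrapper above, kept as a comment so that
# importing this module has no side effects; the module name "_pyio" matches
# CPython's Lib/_pyio.py and the file name "example.txt" is hypothetical:
#
#     import _pyio
#     with _pyio.open("example.txt", "w", encoding="utf-8") as f:
#         f.write("Spam and eggs!\n")                 # TextIOWrapper
#     with _pyio.open("example.txt", "rb", buffering=0) as raw:
#         data = raw.read()                           # unbuffered FileIO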


class DocDescriptor:
    """Helper for builtins.open.__doc__
    """
    def __get__(self, obj, typ):
        return (
            "open(file, mode='r', buffering=None, encoding=None, "
            "errors=None, newline=None, closefd=True)\n\n" +
            open.__doc__)

class OpenWrapper:
    """Wrapper for builtins.open

    Trick so that open won't become a bound method when stored
    as a class variable (as dbm.dumb does).

    See initstdio() in Python/pythonrun.c.
    """
    __doc__ = DocDescriptor()

    def __new__(cls, *args, **kwargs):
        return open(*args, **kwargs)


class UnsupportedOperation(ValueError, IOError):
    pass


class IOBase(metaclass=abc.ABCMeta):

    """The abstract base class for all I/O classes, acting on streams of
    bytes. There is no public constructor.

    This class provides dummy implementations for many methods that
    derived classes can override selectively; the default implementations
    represent a file that cannot be read, written or seeked.

    Even though IOBase does not declare read, readinto, or write because
    their signatures will vary, implementations and clients should
    consider those methods part of the interface. Also, implementations
    may raise an IOError when operations they do not support are called.

    The basic type used for binary data read from or written to a file is
    bytes. bytearrays are accepted too, and in some cases (such as
    readinto) needed. Text I/O classes work with str data.

    Note that calling any method (even inquiries) on a closed stream is
    undefined. Implementations may raise IOError in this case.

    IOBase (and its subclasses) support the iterator protocol, meaning
    that an IOBase object can be iterated over yielding the lines in a
    stream.

    IOBase also supports the :keyword:`with` statement. In this example,
    fp is closed after the suite of the with statement is complete:

    with open('spam.txt', 'w') as fp:
        fp.write('Spam and eggs!')
    """

    ### Internal ###

    def _unsupported(self, name: str) -> IOError:
        """Internal: raise an exception for unsupported operations."""
        raise UnsupportedOperation("%s.%s() not supported" %
                                   (self.__class__.__name__, name))

    ### Positioning ###

    def seek(self, pos: int, whence: int = 0) -> int:
        """Change stream position.

        Change the stream position to byte offset offset. offset is
        interpreted relative to the position indicated by whence. Values
        for whence are:

        * 0 -- start of stream (the default); offset should be zero or positive
        * 1 -- current stream position; offset may be negative
        * 2 -- end of stream; offset is usually negative

        Return the new absolute position.
        """
        self._unsupported("seek")

    def tell(self) -> int:
        """Return current stream position."""
        return self.seek(0, 1)

    def truncate(self, pos: int = None) -> int:
        """Truncate file to size bytes.

        Size defaults to the current IO position as reported by tell(). Return
        the new size.
        """
        self._unsupported("truncate")

    ### Flush and close ###

    def flush(self) -> None:
        """Flush write buffers, if applicable.

        This is not implemented for read-only and non-blocking streams.
        """
        self._checkClosed()
        # XXX Should this return the number of bytes written???

    __closed = False

    def close(self) -> None:
        """Flush and close the IO object.

        This method has no effect if the file is already closed.
        """
        if not self.__closed:
            self.flush()
            self.__closed = True

    def __del__(self) -> None:
        """Destructor. Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail. Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass

    ### Inquiries ###

    def seekable(self) -> bool:
        """Return whether object supports random access.

        If False, seek(), tell() and truncate() will raise IOError.
        This method may need to do a test seek().
        """
        return False

    def _checkSeekable(self, msg=None):
        """Internal: raise an IOError if file is not seekable
        """
        if not self.seekable():
            raise IOError("File or stream is not seekable."
                          if msg is None else msg)


    def readable(self) -> bool:
        """Return whether object was opened for reading.

        If False, read() will raise IOError.
        """
        return False

    def _checkReadable(self, msg=None):
        """Internal: raise an IOError if file is not readable
        """
        if not self.readable():
            raise IOError("File or stream is not readable."
                          if msg is None else msg)

    def writable(self) -> bool:
        """Return whether object was opened for writing.

        If False, write() and truncate() will raise IOError.
        """
        return False

    def _checkWritable(self, msg=None):
        """Internal: raise an IOError if file is not writable
        """
        if not self.writable():
            raise IOError("File or stream is not writable."
                          if msg is None else msg)

    @property
    def closed(self):
        """closed: bool. True iff the file has been closed.

        For backwards compatibility, this is a property, not a predicate.
        """
        return self.__closed

    def _checkClosed(self, msg=None):
        """Internal: raise a ValueError if file is closed
        """
        if self.closed:
            raise ValueError("I/O operation on closed file."
                             if msg is None else msg)

    ### Context manager ###

    def __enter__(self) -> "IOBase":  # That's a forward reference
        """Context management protocol. Returns self."""
        self._checkClosed()
        return self

    def __exit__(self, *args) -> None:
        """Context management protocol. Calls close()"""
        self.close()

    ### Lower-level APIs ###

    # XXX Should these be present even if unimplemented?

    def fileno(self) -> int:
        """Returns underlying file descriptor if one exists.

        An IOError is raised if the IO object does not use a file descriptor.
        """
        self._unsupported("fileno")

    def isatty(self) -> bool:
        """Return whether this is an 'interactive' stream.

        Return False if it can't be determined.
        """
        self._checkClosed()
        return False

    ### Readline[s] and writelines ###

    def readline(self, limit: int = -1) -> bytes:
        r"""Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.

        The line terminator is always b'\n' for binary files; for text
        files, the newlines argument to open can be used to select the line
        terminator(s) recognized.
        """
        # For backwards compatibility, a (slowish) readline().
        if hasattr(self, "peek"):
            def nreadahead():
                readahead = self.peek(1)
                if not readahead:
                    return 1
                n = (readahead.find(b"\n") + 1) or len(readahead)
                if limit >= 0:
                    n = min(n, limit)
                return n
        else:
            def nreadahead():
                return 1
        if limit is None:
            limit = -1
        elif not isinstance(limit, int):
            raise TypeError("limit must be an integer")
        res = bytearray()
        while limit < 0 or len(res) < limit:
            b = self.read(nreadahead())
            if not b:
                break
            res += b
            if res.endswith(b"\n"):
                break
        return bytes(res)

    def __iter__(self):
        self._checkClosed()
        return self

    def __next__(self):
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def readlines(self, hint=None):
        """Return a list of lines from the stream.

        hint can be specified to control the number of lines read: no more
        lines will be read if the total size (in bytes/characters) of all
        lines so far exceeds hint.
        """
        if hint is None or hint <= 0:
            return list(self)
        n = 0
        lines = []
        for line in self:
            lines.append(line)
            n += len(line)
            if n >= hint:
                break
        return lines

    def writelines(self, lines):
        self._checkClosed()
        for line in lines:
            self.write(line)

io.IOBase.register(IOBase)
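
# Sketch of the line-iteration protocol IOBase provides (comment only, so
# nothing runs at import time); it uses the BytesIO class defined further
# down in this module:
#
#     buf = BytesIO(b"one\ntwo\nthree\n")
#     [line for line in buf]      # [b'one\n', b'two\n', b'three\n']
#     buf.seek(0)
#     buf.readlines(4)            # stops once 4 bytes are exceeded: [b'one\n']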


class RawIOBase(IOBase):

    """Base class for raw binary I/O."""

    # The read() method is implemented by calling readinto(); derived
    # classes that want to support read() only need to implement
    # readinto() as a primitive operation. In general, readinto() can be
    # more efficient than read().

    # (It would be tempting to also provide an implementation of
    # readinto() in terms of read(), in case the latter is a more suitable
    # primitive operation, but that would lead to nasty recursion in case
    # a subclass doesn't implement either.)

    def read(self, n: int = -1) -> bytes:
        """Read and return up to n bytes.

        Returns an empty bytes object on EOF, or None if the object is
        set not to block and has no data to read.
        """
        if n is None:
            n = -1
        if n < 0:
            return self.readall()
        b = bytearray(n.__index__())
        n = self.readinto(b)
        del b[n:]
        return bytes(b)

    def readall(self):
        """Read until EOF, using multiple read() calls."""
        res = bytearray()
        while True:
            data = self.read(DEFAULT_BUFFER_SIZE)
            if not data:
                break
            res += data
        return bytes(res)

    def readinto(self, b: bytearray) -> int:
        """Read up to len(b) bytes into b.

        Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
        """
        self._unsupported("readinto")

    def write(self, b: bytes) -> int:
        """Write the given buffer to the IO stream.

        Returns the number of bytes written, which may be less than len(b).
        """
        self._unsupported("write")

io.RawIOBase.register(RawIOBase)
from _io import FileIO
RawIOBase.register(FileIO)


class BufferedIOBase(IOBase):

    """Base class for buffered IO objects.

    The main difference with RawIOBase is that the read() method
    supports omitting the size argument, and does not have a default
    implementation that defers to readinto().

    In addition, read(), readinto() and write() may raise
    BlockingIOError if the underlying raw stream is in non-blocking
    mode and not ready; unlike their raw counterparts, they will never
    return None.

    A typical implementation should not inherit from a RawIOBase
    implementation, but wrap one.
    """

    def read(self, n: int = None) -> bytes:
        """Read and return up to n bytes.

        If the argument is omitted, None, or negative, reads and
        returns all data until EOF.

        If the argument is positive, and the underlying raw stream is
        not 'interactive', multiple raw reads may be issued to satisfy
        the byte count (unless EOF is reached first). But for
        interactive raw streams (XXX and for pipes?), at most one raw
        read will be issued, and a short result does not imply that
        EOF is imminent.

        Returns an empty bytes array on EOF.

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        self._unsupported("read")

    def read1(self, n: int=None) -> bytes:
        """Read up to n bytes with at most one read() system call."""
        self._unsupported("read1")

    def readinto(self, b: bytearray) -> int:
        """Read up to len(b) bytes into b.

        Like read(), this may issue multiple reads to the underlying raw
        stream, unless the latter is 'interactive'.

        Returns the number of bytes read (0 for EOF).

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        # XXX This ought to work with anything that supports the buffer API
        data = self.read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            import array
            if not isinstance(b, array.array):
                raise err
            b[:n] = array.array('b', data)
        return n

    def write(self, b: bytes) -> int:
        """Write the given buffer to the IO stream.

        Return the number of bytes written, which is never less than
        len(b).

        Raises BlockingIOError if the buffer is full and the
        underlying raw stream cannot accept more data at the moment.
        """
        self._unsupported("write")

    def detach(self) -> None:
        """
        Separate the underlying raw stream from the buffer and return it.

        After the raw stream has been detached, the buffer is in an unusable
        state.
        """
        self._unsupported("detach")

io.BufferedIOBase.register(BufferedIOBase)


class _BufferedIOMixin(BufferedIOBase):

    """A mixin implementation of BufferedIOBase with an underlying raw stream.

    This passes most requests on to the underlying raw stream. It
    does *not* provide implementations of read(), readinto() or
    write().
    """

    def __init__(self, raw):
        self.raw = raw

    ### Positioning ###

    def seek(self, pos, whence=0):
        new_position = self.raw.seek(pos, whence)
        if new_position < 0:
            raise IOError("seek() returned an invalid position")
        return new_position

    def tell(self):
        pos = self.raw.tell()
        if pos < 0:
            raise IOError("tell() returned an invalid position")
        return pos

    def truncate(self, pos=None):
        # Flush the stream. We're mixing buffered I/O with lower-level I/O,
        # and a flush may be necessary to synch both views of the current
        # file state.
        self.flush()

        if pos is None:
            pos = self.tell()
        # XXX: Should seek() be used, instead of passing the position
        # XXX  directly to truncate?
        return self.raw.truncate(pos)

    ### Flush and close ###

    def flush(self):
        if self.closed:
            raise ValueError("flush of closed file")
        self.raw.flush()

    def close(self):
        if self.raw is not None and not self.closed:
            self.flush()
            self.raw.close()

    def detach(self):
        if self.raw is None:
            raise ValueError("raw stream already detached")
        self.flush()
        raw = self.raw
        self.raw = None
        return raw

    ### Inquiries ###

    def seekable(self):
        return self.raw.seekable()

    def readable(self):
        return self.raw.readable()

    def writable(self):
        return self.raw.writable()

    @property
    def closed(self):
        return self.raw.closed

    @property
    def name(self):
        return self.raw.name

    @property
    def mode(self):
        return self.raw.mode

    def __repr__(self):
        clsname = self.__class__.__name__
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.{0}>".format(clsname)
        else:
            return "<_pyio.{0} name={1!r}>".format(clsname, name)

    ### Lower-level APIs ###

    def fileno(self):
        return self.raw.fileno()

    def isatty(self):
        return self.raw.isatty()


class BytesIO(BufferedIOBase):

    """Buffered I/O implementation using an in-memory bytes buffer."""

    def __init__(self, initial_bytes=None):
        buf = bytearray()
        if initial_bytes is not None:
            buf += initial_bytes
        self._buffer = buf
        self._pos = 0

    def getvalue(self):
        """Return the bytes value (contents) of the buffer
        """
        if self.closed:
            raise ValueError("getvalue on closed file")
        return bytes(self._buffer)

    def read(self, n=None):
        if self.closed:
            raise ValueError("read from closed file")
        if n is None:
            n = -1
        if n < 0:
            n = len(self._buffer)
        if len(self._buffer) <= self._pos:
            return b""
        newpos = min(len(self._buffer), self._pos + n)
        b = self._buffer[self._pos : newpos]
        self._pos = newpos
        return bytes(b)

    def read1(self, n):
        """This is the same as read.
        """
        return self.read(n)

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        n = len(b)
        if n == 0:
            return 0
        pos = self._pos
        if pos > len(self._buffer):
            # Inserts null bytes between the current end of the file
            # and the new write position.
            padding = b'\x00' * (pos - len(self._buffer))
            self._buffer += padding
        self._buffer[pos:pos + n] = b
        self._pos += n
        return n

    def seek(self, pos, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        try:
            pos = pos.__index__()
        except AttributeError as err:
            raise TypeError("an integer is required") from err
        if whence == 0:
            if pos < 0:
                raise ValueError("negative seek position %r" % (pos,))
            self._pos = pos
        elif whence == 1:
            self._pos = max(0, self._pos + pos)
        elif whence == 2:
            self._pos = max(0, len(self._buffer) + pos)
        else:
            raise ValueError("invalid whence value")
        return self._pos

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file")
        return self._pos

    def truncate(self, pos=None):
        if self.closed:
            raise ValueError("truncate on closed file")
        if pos is None:
            pos = self._pos
        elif pos < 0:
            raise ValueError("negative truncate position %r" % (pos,))
        del self._buffer[pos:]
        return pos

    def readable(self):
        return True

    def writable(self):
        return True

    def seekable(self):
        return True
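
# BytesIO usage sketch (comment only): the buffer behaves like a seekable
# binary file held in memory.
#
#     b = BytesIO(b"abcdef")
#     b.read(2)          # b'ab'
#     b.seek(0, 2)       # seek to the end, returns 6
#     b.write(b"!")      # returns 1
#     b.getvalue()       # b'abcdef!'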


class BufferedReader(_BufferedIOMixin):

    """BufferedReader(raw[, buffer_size])

    A buffer for a readable, sequential BaseRawIO object.

    The constructor creates a BufferedReader for the given readable raw
    stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
    is used.
    """

    def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
        """Create a new buffered reader using the given readable raw IO object.
        """
        if not raw.readable():
            raise IOError('"raw" argument must be readable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        self.buffer_size = buffer_size
        self._reset_read_buf()
        self._read_lock = Lock()

    def _reset_read_buf(self):
        self._read_buf = b""
        self._read_pos = 0

    def read(self, n=None):
        """Read n bytes.

        Returns exactly n bytes of data unless the underlying raw IO
        stream reaches EOF or if the call would block in non-blocking
        mode. If n is negative, read until EOF or until read() would
        block.
        """
        if n is not None and n < -1:
            raise ValueError("invalid number of bytes to read")
        with self._read_lock:
            return self._read_unlocked(n)

    def _read_unlocked(self, n=None):
        nodata_val = b""
        empty_values = (b"", None)
        buf = self._read_buf
        pos = self._read_pos

        # Special case for when the number of bytes to read is unspecified.
        if n is None or n == -1:
            self._reset_read_buf()
            chunks = [buf[pos:]] # Strip the consumed bytes.
            current_size = 0
            while True:
                # Read until EOF or until read() would block.
                chunk = self.raw.read()
                if chunk in empty_values:
                    nodata_val = chunk
                    break
                current_size += len(chunk)
                chunks.append(chunk)
            return b"".join(chunks) or nodata_val

        # The number of bytes to read is specified, return at most n bytes.
        avail = len(buf) - pos # Length of the available buffered data.
        if n <= avail:
            # Fast path: the data to read is fully buffered.
            self._read_pos += n
            return buf[pos:pos+n]
        # Slow path: read from the stream until enough bytes are read,
        # or until an EOF occurs or until read() would block.
        chunks = [buf[pos:]]
        wanted = max(self.buffer_size, n)
        while avail < n:
            chunk = self.raw.read(wanted)
            if chunk in empty_values:
                nodata_val = chunk
                break
            avail += len(chunk)
            chunks.append(chunk)
        # n is more than avail only when an EOF occurred or when
        # read() would have blocked.
        n = min(n, avail)
        out = b"".join(chunks)
        self._read_buf = out[n:] # Save the extra data in the buffer.
        self._read_pos = 0
        return out[:n] if out else nodata_val

    def peek(self, n=0):
        """Returns buffered bytes without advancing the position.

        The argument indicates a desired minimal number of bytes; we
        do at most one raw read to satisfy it. We never return more
        than self.buffer_size.
        """
        with self._read_lock:
            return self._peek_unlocked(n)

    def _peek_unlocked(self, n=0):
        want = min(n, self.buffer_size)
        have = len(self._read_buf) - self._read_pos
        if have < want or have <= 0:
            to_read = self.buffer_size - have
            current = self.raw.read(to_read)
            if current:
                self._read_buf = self._read_buf[self._read_pos:] + current
                self._read_pos = 0
        return self._read_buf[self._read_pos:]

    def read1(self, n):
        """Reads up to n bytes, with at most one read() system call."""
        # Returns up to n bytes. If at least one byte is buffered, we
        # only return buffered bytes. Otherwise, we do one raw read.
        if n < 0:
            raise ValueError("number of bytes to read must be positive")
        if n == 0:
            return b""
        with self._read_lock:
            self._peek_unlocked(1)
            return self._read_unlocked(
                min(n, len(self._read_buf) - self._read_pos))

    def tell(self):
        return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence value")
        with self._read_lock:
            if whence == 1:
                pos -= len(self._read_buf) - self._read_pos
            pos = _BufferedIOMixin.seek(self, pos, whence)
            self._reset_read_buf()
            return pos

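# Sketch of wrapping raw FileIO streams in the buffered classes (comment
# only; the paths "in.bin" and "out.bin" are hypothetical):
#
#     src = BufferedReader(FileIO("in.bin", "r"), buffer_size=4096)
#     dst = BufferedWriter(FileIO("out.bin", "w"))
#     while True:
#         chunk = src.read1(4096)      # at most one raw read per call
#         if not chunk:
#             break
#         dst.write(chunk)
#     dst.flush()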
class BufferedWriter(_BufferedIOMixin):

    """A buffer for a writeable sequential RawIO object.

    The constructor creates a BufferedWriter for the given writeable raw
    stream. If the buffer_size is not given, it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 2

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        if not raw.writable():
            raise IOError('"raw" argument must be writable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                          self._warning_stack_offset)
        self.buffer_size = buffer_size
        self._write_buf = bytearray()
        self._write_lock = Lock()

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        with self._write_lock:
            # XXX we can implement some more tricks to try and avoid
            # partial writes
            if len(self._write_buf) > self.buffer_size:
                # We're full, so let's pre-flush the buffer
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    # We can't accept anything else.
                    # XXX Why not just let the exception pass through?
                    raise BlockingIOError(e.errno, e.strerror, 0)
            before = len(self._write_buf)
            self._write_buf.extend(b)
            written = len(self._write_buf) - before
            if len(self._write_buf) > self.buffer_size:
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    if len(self._write_buf) > self.buffer_size:
                        # We've hit the buffer_size. We have to accept a partial
                        # write and cut back our buffer.
                        overage = len(self._write_buf) - self.buffer_size
                        written -= overage
                        self._write_buf = self._write_buf[:self.buffer_size]
                    raise BlockingIOError(e.errno, e.strerror, written)
            return written

    def truncate(self, pos=None):
        with self._write_lock:
            self._flush_unlocked()
            if pos is None:
                pos = self.raw.tell()
            return self.raw.truncate(pos)

    def flush(self):
        with self._write_lock:
            self._flush_unlocked()

    def _flush_unlocked(self):
        if self.closed:
            raise ValueError("flush of closed file")
        written = 0
        try:
            while self._write_buf:
                n = self.raw.write(self._write_buf)
                if n > len(self._write_buf) or n < 0:
                    raise IOError("write() returned incorrect number of bytes")
                del self._write_buf[:n]
                written += n
        except BlockingIOError as e:
            n = e.characters_written
            del self._write_buf[:n]
            written += n
            raise BlockingIOError(e.errno, e.strerror, written)

    def tell(self):
        return _BufferedIOMixin.tell(self) + len(self._write_buf)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        with self._write_lock:
            self._flush_unlocked()
            return _BufferedIOMixin.seek(self, pos, whence)


class BufferedRWPair(BufferedIOBase):

    """A buffered reader and writer object together.

    A buffered reader object and buffered writer object put together to
    form a sequential IO object that can read and write. This is typically
    used with a socket or two-way pipe.

    reader and writer are RawIOBase objects that are readable and
    writeable respectively. If the buffer_size is omitted it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    # XXX The usefulness of this (compared to having two separate IO
    # objects) is questionable.

    def __init__(self, reader, writer,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        """Constructor.

        The arguments are two RawIO instances.
        """
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)

        if not reader.readable():
            raise IOError('"reader" argument must be readable.')

        if not writer.writable():
            raise IOError('"writer" argument must be writable.')

        self.reader = BufferedReader(reader, buffer_size)
        self.writer = BufferedWriter(writer, buffer_size)

    def read(self, n=None):
        if n is None:
            n = -1
        return self.reader.read(n)

    def readinto(self, b):
        return self.reader.readinto(b)

    def write(self, b):
        return self.writer.write(b)

    def peek(self, n=0):
        return self.reader.peek(n)

    def read1(self, n):
        return self.reader.read1(n)

    def readable(self):
        return self.reader.readable()

    def writable(self):
        return self.writer.writable()

    def flush(self):
        return self.writer.flush()

    def close(self):
        self.writer.close()
        self.reader.close()

    def isatty(self):
        return self.reader.isatty() or self.writer.isatty()

    @property
    def closed(self):
        return self.writer.closed
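
# BufferedRWPair sketch (comment only): pairing the two ends of an os.pipe()
# into one read/write object. This pipe-based pairing is an illustrative
# assumption, not something this module requires:
#
#     r_fd, w_fd = os.pipe()
#     pair = BufferedRWPair(FileIO(r_fd, "r"), FileIO(w_fd, "w"))
#     pair.write(b"ping")
#     pair.flush()
#     pair.read(4)        # b'ping'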


class BufferedRandom(BufferedWriter, BufferedReader):

    """A buffered interface to random access streams.

    The constructor creates a reader and writer for a seekable stream,
    raw, given in the first argument. If the buffer_size is omitted it
    defaults to DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 3

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        raw._checkSeekable()
        BufferedReader.__init__(self, raw, buffer_size)
        BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        self.flush()
        if self._read_buf:
            # Undo read ahead.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
        # First do the raw seek, then empty the read buffer, so that
        # if the raw seek fails, we don't lose buffered data forever.
        pos = self.raw.seek(pos, whence)
        with self._read_lock:
            self._reset_read_buf()
        if pos < 0:
            raise IOError("seek() returned invalid position")
        return pos

    def tell(self):
        if self._write_buf:
            return BufferedWriter.tell(self)
        else:
            return BufferedReader.tell(self)

    def truncate(self, pos=None):
        if pos is None:
            pos = self.tell()
        # Use seek to flush the read buffer.
        return BufferedWriter.truncate(self, pos)

    def read(self, n=None):
        if n is None:
            n = -1
        self.flush()
        return BufferedReader.read(self, n)

    def readinto(self, b):
        self.flush()
        return BufferedReader.readinto(self, b)

    def peek(self, n=0):
        self.flush()
        return BufferedReader.peek(self, n)

    def read1(self, n):
        self.flush()
        return BufferedReader.read1(self, n)

    def write(self, b):
        if self._read_buf:
            # Undo readahead
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
                self._reset_read_buf()
        return BufferedWriter.write(self, b)


class TextIOBase(IOBase):

    """Base class for text I/O.

    This class provides a character and line based interface to stream
    I/O. There is no readinto method because Python's character strings
    are immutable. There is no public constructor.
    """

    def read(self, n: int = -1) -> str:
        """Read at most n characters from stream.

        Read from underlying buffer until we have n characters or we hit EOF.
        If n is negative or omitted, read until EOF.
        """
        self._unsupported("read")

    def write(self, s: str) -> int:
        """Write string s to stream."""
        self._unsupported("write")

    def truncate(self, pos: int = None) -> int:
        """Truncate size to pos."""
        self._unsupported("truncate")

    def readline(self) -> str:
        """Read until newline or EOF.

        Returns an empty string if EOF is hit immediately.
        """
        self._unsupported("readline")

    def detach(self) -> None:
        """
        Separate the underlying buffer from the TextIOBase and return it.

        After the underlying buffer has been detached, the TextIO is in an
        unusable state.
        """
        self._unsupported("detach")

    @property
    def encoding(self):
        """Subclasses should override."""
        return None

    @property
    def newlines(self):
        """Line endings translated so far.

        Only line endings translated during reading are considered.

        Subclasses should override.
        """
        return None

    @property
    def errors(self):
        """Error setting of the decoder or encoder.

        Subclasses should override."""
        return None

io.TextIOBase.register(TextIOBase)


class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
    r"""Codec used when reading a file in universal newlines mode. It wraps
    another incremental decoder, translating \r\n and \r into \n. It also
    records the types of newlines encountered. When used with
    translate=False, it ensures that the newline sequence is returned in
    one piece.
    """
    def __init__(self, decoder, translate, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors=errors)
        self.translate = translate
        self.decoder = decoder
        self.seennl = 0
        self.pendingcr = False

    def decode(self, input, final=False):
        # decode input (with the eventual \r from a previous pass)
        if self.decoder is None:
            output = input
        else:
            output = self.decoder.decode(input, final=final)
        if self.pendingcr and (output or final):
            output = "\r" + output
            self.pendingcr = False

        # retain last \r even when not translating data:
        # then readline() is sure to get \r\n in one pass
        if output.endswith("\r") and not final:
            output = output[:-1]
            self.pendingcr = True

        # Record which newlines are read
        crlf = output.count('\r\n')
        cr = output.count('\r') - crlf
        lf = output.count('\n') - crlf
        self.seennl |= (lf and self._LF) | (cr and self._CR) \
                    | (crlf and self._CRLF)

        if self.translate:
            if crlf:
                output = output.replace("\r\n", "\n")
            if cr:
                output = output.replace("\r", "\n")

        return output

    def getstate(self):
        if self.decoder is None:
            buf = b""
            flag = 0
        else:
            buf, flag = self.decoder.getstate()
        flag <<= 1
        if self.pendingcr:
            flag |= 1
        return buf, flag

    def setstate(self, state):
        buf, flag = state
        self.pendingcr = bool(flag & 1)
        if self.decoder is not None:
            self.decoder.setstate((buf, flag >> 1))

    def reset(self):
        self.seennl = 0
        self.pendingcr = False
        if self.decoder is not None:
            self.decoder.reset()

    _LF = 1
    _CR = 2
    _CRLF = 4

    @property
    def newlines(self):
        return (None,
                "\n",
                "\r",
                ("\r", "\n"),
                "\r\n",
                ("\n", "\r\n"),
                ("\r", "\r\n"),
                ("\r", "\n", "\r\n")
               )[self.seennl]
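
# IncrementalNewlineDecoder sketch (comment only): with translate=True and no
# wrapped decoder, mixed line endings collapse to '\n' and are recorded in the
# newlines property:
#
#     dec = IncrementalNewlineDecoder(None, translate=True)
#     dec.decode("a\r\nb\rc\n", final=True)   # 'a\nb\nc\n'
#     dec.newlines                            # ('\r', '\n', '\r\n')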


class TextIOWrapper(TextIOBase):

    r"""Character and line based layer over a BufferedIOBase object, buffer.

    encoding gives the name of the encoding that the stream will be
    decoded or encoded with. It defaults to locale.getpreferredencoding.

    errors determines the strictness of encoding and decoding (see the
    codecs.register) and defaults to "strict".

    newline can be None, '', '\n', '\r', or '\r\n'. It controls the
    handling of line endings. If it is None, universal newlines is
    enabled. With this enabled, on input, the line endings '\n', '\r',
    or '\r\n' are translated to '\n' before being returned to the
    caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any other of its
    legal values, that newline becomes the newline when the file is read
    and it is returned untranslated. On output, '\n' is converted to the
    newline.

    If line_buffering is True, a call to flush is implied when a call to
    write contains a newline character.
    """

    _CHUNK_SIZE = 2048

    def __init__(self, buffer, encoding=None, errors=None, newline=None,
                 line_buffering=False):
        if newline is not None and not isinstance(newline, str):
            raise TypeError("illegal newline type: %r" % (type(newline),))
        if newline not in (None, "", "\n", "\r", "\r\n"):
            raise ValueError("illegal newline value: %r" % (newline,))
        if encoding is None:
            try:
                encoding = os.device_encoding(buffer.fileno())
            except (AttributeError, UnsupportedOperation):
                pass
            if encoding is None:
                try:
                    import locale
                except ImportError:
                    # Importing locale may fail if Python is being built
                    encoding = "ascii"
                else:
                    encoding = locale.getpreferredencoding()

        if not isinstance(encoding, str):
            raise ValueError("invalid encoding: %r" % encoding)

        if errors is None:
            errors = "strict"
        else:
            if not isinstance(errors, str):
                raise ValueError("invalid errors: %r" % errors)

        self.buffer = buffer
        self._line_buffering = line_buffering
        self._encoding = encoding
        self._errors = errors
        self._readuniversal = not newline
        self._readtranslate = newline is None
        self._readnl = newline
        self._writetranslate = newline != ''
        self._writenl = newline or os.linesep
        self._encoder = None
        self._decoder = None
        self._decoded_chars = ''  # buffer for text returned from decoder
        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
        self._snapshot = None  # info for reconstructing decoder state
        self._seekable = self._telling = self.buffer.seekable()

        if self._seekable and self.writable():
            position = self.buffer.tell()
            if position != 0:
                try:
                    self._get_encoder().setstate(0)
                except LookupError:
                    # Sometimes the encoder doesn't exist
                    pass

    # self._snapshot is either None, or a tuple (dec_flags, next_input)
    # where dec_flags is the second (integer) item of the decoder state
    # and next_input is the chunk of input bytes that comes next after the
    # snapshot point. We use this to reconstruct decoder states in tell().

    # Naming convention:
    # - "bytes_..." for integer variables that count input bytes
    # - "chars_..." for integer variables that count decoded characters

    def __repr__(self):
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.TextIOWrapper encoding={0!r}>".format(self.encoding)
        else:
            return "<_pyio.TextIOWrapper name={0!r} encoding={1!r}>".format(
                name, self.encoding)

    @property
    def encoding(self):
        return self._encoding

    @property
    def errors(self):
        return self._errors

    @property
    def line_buffering(self):
        return self._line_buffering

    def seekable(self):
        return self._seekable

    def readable(self):
        return self.buffer.readable()

    def writable(self):
        return self.buffer.writable()

    def flush(self):
        self.buffer.flush()
        self._telling = self._seekable

    def close(self):
        if self.buffer is not None and not self.closed:
            self.flush()
            self.buffer.close()

    @property
    def closed(self):
        return self.buffer.closed

    @property
    def name(self):
        return self.buffer.name

    def fileno(self):
        return self.buffer.fileno()

    def isatty(self):
        return self.buffer.isatty()

    def write(self, s: str):
        if self.closed:
            raise ValueError("write to closed file")
        if not isinstance(s, str):
            raise TypeError("can't write %s to text stream" %
                            s.__class__.__name__)
        length = len(s)
        haslf = (self._writetranslate or self._line_buffering) and "\n" in s
        if haslf and self._writetranslate and self._writenl != "\n":
            s = s.replace("\n", self._writenl)
        encoder = self._encoder or self._get_encoder()
        # XXX What if we were just reading?
        b = encoder.encode(s)
        self.buffer.write(b)
        if self._line_buffering and (haslf or "\r" in s):
            self.flush()
        self._snapshot = None
        if self._decoder:
            self._decoder.reset()
        return length

    def _get_encoder(self):
        make_encoder = codecs.getincrementalencoder(self._encoding)
        self._encoder = make_encoder(self._errors)
        return self._encoder

    def _get_decoder(self):
        make_decoder = codecs.getincrementaldecoder(self._encoding)
        decoder = make_decoder(self._errors)
        if self._readuniversal:
            decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
        self._decoder = decoder
        return decoder

    # The following three methods implement an ADT for _decoded_chars.
    # Text returned from the decoder is buffered here until the client
    # requests it by calling our read() or readline() method.
    def _set_decoded_chars(self, chars):
        """Set the _decoded_chars buffer."""
        self._decoded_chars = chars
        self._decoded_chars_used = 0

    def _get_decoded_chars(self, n=None):
        """Advance into the _decoded_chars buffer."""
        offset = self._decoded_chars_used
        if n is None:
            chars = self._decoded_chars[offset:]
        else:
            chars = self._decoded_chars[offset:offset + n]
        self._decoded_chars_used += len(chars)
        return chars

    def _rewind_decoded_chars(self, n):
        """Rewind the _decoded_chars buffer."""
        if self._decoded_chars_used < n:
            raise AssertionError("rewind decoded_chars out of bounds")
        self._decoded_chars_used -= n

    def _read_chunk(self):
        """
        Read and decode the next chunk of data from the BufferedReader.
        """

        # The return value is True unless EOF was reached. The decoded
        # string is placed in self._decoded_chars (replacing its previous
        # value). The entire input chunk is sent to the decoder, though
        # some of it may remain buffered in the decoder, yet to be
        # converted.

        if self._decoder is None:
            raise ValueError("no decoder")

        if self._telling:
            # To prepare for tell(), we need to snapshot a point in the
            # file where the decoder's input buffer is empty.

            dec_buffer, dec_flags = self._decoder.getstate()
            # Given this, we know there was a valid snapshot point
            # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).

        # Read a chunk, decode it, and put the result in self._decoded_chars.
        input_chunk = self.buffer.read1(self._CHUNK_SIZE)
        eof = not input_chunk
        self._set_decoded_chars(self._decoder.decode(input_chunk, eof))

        if self._telling:
            # At the snapshot point, len(dec_buffer) bytes before the read,
            # the next input to be decoded is dec_buffer + input_chunk.
            self._snapshot = (dec_flags, dec_buffer + input_chunk)

        return not eof

    def _pack_cookie(self, position, dec_flags=0,
                     bytes_to_feed=0, need_eof=0, chars_to_skip=0):
        # The meaning of a tell() cookie is: seek to position, set the
        # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
        # into the decoder with need_eof as the EOF flag, then skip
        # chars_to_skip characters of the decoded result. For most simple
        # decoders, tell() will often just give a byte offset in the file.
        return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
                (chars_to_skip<<192) | bool(need_eof)<<256)

    def _unpack_cookie(self, bigint):
        rest, position = divmod(bigint, 1<<64)
        rest, dec_flags = divmod(rest, 1<<64)
        rest, bytes_to_feed = divmod(rest, 1<<64)
        need_eof, chars_to_skip = divmod(rest, 1<<64)
        return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip

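    # Worked example of the cookie layout above (illustrative comment only):
    # a position of 10 with dec_flags=1 packs to 10 | (1 << 64), and
    # _unpack_cookie recovers (10, 1, 0, 0, 0) by repeated divmod(..., 1 << 64).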
    def tell(self):
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if not self._telling:
            raise IOError("telling position disabled by next() call")
        self.flush()
        position = self.buffer.tell()
        decoder = self._decoder
        if decoder is None or self._snapshot is None:
            if self._decoded_chars:
                # This should never happen.
                raise AssertionError("pending decoded text")
            return position

        # Skip backward to the snapshot point (see _read_chunk).
        dec_flags, next_input = self._snapshot
        position -= len(next_input)

        # How many decoded characters have been used up since the snapshot?
        chars_to_skip = self._decoded_chars_used
        if chars_to_skip == 0:
            # We haven't moved from the snapshot point.
            return self._pack_cookie(position, dec_flags)

        # Starting from the snapshot position, we will walk the decoder
        # forward until it gives us enough decoded characters.
        saved_state = decoder.getstate()
        try:
            # Note our initial start point.
            decoder.setstate((b'', dec_flags))
            start_pos = position
            start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
            need_eof = 0

            # Feed the decoder one byte at a time.  As we go, note the
            # nearest "safe start point" before the current location
            # (a point where the decoder has nothing buffered, so seek()
            # can safely start from there and advance to this location).
            next_byte = bytearray(1)
            for next_byte[0] in next_input:
                bytes_fed += 1
                chars_decoded += len(decoder.decode(next_byte))
                dec_buffer, dec_flags = decoder.getstate()
                if not dec_buffer and chars_decoded <= chars_to_skip:
                    # Decoder buffer is empty, so this is a safe start point.
                    start_pos += bytes_fed
                    chars_to_skip -= chars_decoded
                    start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
                if chars_decoded >= chars_to_skip:
                    break
            else:
                # We didn't get enough decoded data; signal EOF to get more.
                chars_decoded += len(decoder.decode(b'', final=True))
                need_eof = 1
                if chars_decoded < chars_to_skip:
                    raise IOError("can't reconstruct logical file position")

            # The returned cookie corresponds to the last safe start point.
            return self._pack_cookie(
                start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
        finally:
            decoder.setstate(saved_state)

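    # Illustration of the walk above, assuming a UTF-8 decoder: if the
    # snapshot sits just before the bytes b'\xc3\xa9x' and one decoded
    # character has been consumed, then feeding b'\xc3' decodes nothing
    # (the byte stays buffered), feeding b'\xa9' completes the character
    # and empties the decoder buffer, and the resulting cookie records the
    # position two bytes past the snapshot with chars_to_skip == 0.
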
    def truncate(self, pos=None):
        self.flush()
        if pos is None:
            pos = self.tell()
        return self.buffer.truncate(pos)

    def detach(self):
        if self.buffer is None:
            raise ValueError("buffer is already detached")
        self.flush()
        buffer = self.buffer
        self.buffer = None
        return buffer

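    # Once detach() has returned, this text wrapper is unusable and the
    # caller owns the binary buffer.  (StringIO below overrides detach()
    # to forbid it.)
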
    def seek(self, cookie, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if whence == 1: # seek relative to current position
            if cookie != 0:
                raise IOError("can't do nonzero cur-relative seeks")
            # Seeking to the current position should attempt to
            # sync the underlying buffer with the current position.
            whence = 0
            cookie = self.tell()
        if whence == 2: # seek relative to end of file
            if cookie != 0:
                raise IOError("can't do nonzero end-relative seeks")
            self.flush()
            position = self.buffer.seek(0, 2)
            self._set_decoded_chars('')
            self._snapshot = None
            if self._decoder:
                self._decoder.reset()
            return position
        if whence != 0:
            raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
                             (whence,))
        if cookie < 0:
            raise ValueError("negative seek position %r" % (cookie,))
        self.flush()

        # The strategy of seek() is to go back to the safe start point
        # and replay the effect of read(chars_to_skip) from there.
        start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
            self._unpack_cookie(cookie)

        # Seek back to the safe start point.
        self.buffer.seek(start_pos)
        self._set_decoded_chars('')
        self._snapshot = None

        # Restore the decoder to its state from the safe start point.
        if cookie == 0 and self._decoder:
            self._decoder.reset()
        elif self._decoder or dec_flags or chars_to_skip:
            self._decoder = self._decoder or self._get_decoder()
            self._decoder.setstate((b'', dec_flags))
            self._snapshot = (dec_flags, b'')

        if chars_to_skip:
            # Just like _read_chunk, feed the decoder and save a snapshot.
            input_chunk = self.buffer.read(bytes_to_feed)
            self._set_decoded_chars(
                self._decoder.decode(input_chunk, need_eof))
            self._snapshot = (dec_flags, input_chunk)

            # Skip chars_to_skip of the decoded characters.
            if len(self._decoded_chars) < chars_to_skip:
                raise IOError("can't restore logical file position")
            self._decoded_chars_used = chars_to_skip

        # Finally, reset the encoder (merely useful for proper BOM handling)
        try:
            encoder = self._encoder or self._get_encoder()
        except LookupError:
            # Sometimes the encoder doesn't exist
            pass
        else:
            if cookie != 0:
                encoder.setstate(0)
            else:
                encoder.reset()
        return cookie

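    # Sketch of the tell()/seek() round trip (hypothetical file name,
    # assuming a seekable text-mode stream):
    #
    #   with open("data.txt", encoding="utf-8") as f:
    #       f.read(3)
    #       cookie = f.tell()     # opaque integer built by _pack_cookie()
    #       first = f.read(5)
    #       f.seek(cookie)        # replays decoding from the safe start point
    #       assert f.read(5) == first
    #
    # The cookie is only meaningful for the stream that produced it; it is
    # a plain byte offset only when the decoder happens to be stateless.
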
    def read(self, n=None):
        self._checkReadable()
        if n is None:
            n = -1
        decoder = self._decoder or self._get_decoder()
        if n < 0:
            # Read everything.
            result = (self._get_decoded_chars() +
                      decoder.decode(self.buffer.read(), final=True))
            self._set_decoded_chars('')
            self._snapshot = None
            return result
        else:
            # Keep reading chunks until we have n characters to return.
            eof = False
            result = self._get_decoded_chars(n)
            while len(result) < n and not eof:
                eof = not self._read_chunk()
                result += self._get_decoded_chars(n - len(result))
            return result

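    # Note that a single read(n) call may hit the underlying binary buffer
    # several times: _read_chunk() is retried until at least n decoded
    # characters are available or EOF is reached, which matters when a
    # multi-byte encoding straddles chunk boundaries.
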
    def __next__(self):
        self._telling = False
        line = self.readline()
        if not line:
            self._snapshot = None
            self._telling = self._seekable
            raise StopIteration
        return line

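    # While iterating, _telling is False, so _read_chunk() skips the
    # snapshot bookkeeping and tell() raises IOError; __next__() restores
    # _telling (for seekable streams) once the iterator is exhausted.
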
    def readline(self, limit=None):
        if self.closed:
            raise ValueError("read from closed file")
        if limit is None:
            limit = -1
        elif not isinstance(limit, int):
            raise TypeError("limit must be an integer")

        # Grab all the decoded text (we will rewind any extra bits later).
        line = self._get_decoded_chars()

        start = 0
        # Make the decoder if it doesn't already exist.
        if not self._decoder:
            self._get_decoder()

        pos = endpos = None
        while True:
            if self._readtranslate:
                # Newlines are already translated, only search for \n
                pos = line.find('\n', start)
                if pos >= 0:
                    endpos = pos + 1
                    break
                else:
                    start = len(line)

            elif self._readuniversal:
                # Universal newline search. Find any of \r, \r\n, \n
                # The decoder ensures that \r\n are not split in two pieces

                # In C we'd look for these in parallel of course.
                nlpos = line.find("\n", start)
                crpos = line.find("\r", start)
                if crpos == -1:
                    if nlpos == -1:
                        # Nothing found
                        start = len(line)
                    else:
                        # Found \n
                        endpos = nlpos + 1
                        break
                elif nlpos == -1:
                    # Found lone \r
                    endpos = crpos + 1
                    break
                elif nlpos < crpos:
                    # Found \n
                    endpos = nlpos + 1
                    break
                elif nlpos == crpos + 1:
                    # Found \r\n
                    endpos = crpos + 2
                    break
                else:
                    # Found \r
                    endpos = crpos + 1
                    break
            else:
                # non-universal
                pos = line.find(self._readnl)
                if pos >= 0:
                    endpos = pos + len(self._readnl)
                    break

            if limit >= 0 and len(line) >= limit:
                endpos = limit # reached length limit
                break

            # No line ending seen yet - get more data
            while self._read_chunk():
                if self._decoded_chars:
                    break
            if self._decoded_chars:
                line += self._get_decoded_chars()
            else:
                # end of file
                self._set_decoded_chars('')
                self._snapshot = None
                return line

        if limit >= 0 and endpos > limit:
            endpos = limit # don't exceed limit

        # Rewind _decoded_chars to just after the line ending we found.
        self._rewind_decoded_chars(len(line) - endpos)
        return line[:endpos]

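    # Illustrative behaviour of the search above: with newline='' the
    # universal branch runs and "a\rb\r\nc\n" yields the lines "a\r",
    # "b\r\n" and "c\n"; with newline=None the decoder translates the
    # endings first, so readline() only ever has to look for "\n".
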
    @property
    def newlines(self):
        return self._decoder.newlines if self._decoder else None


class StringIO(TextIOWrapper):
    """Text I/O implementation using an in-memory buffer.

    The initial_value argument sets the initial value of the object.  The
    newline argument works like that of TextIOWrapper's constructor.
    """

    def __init__(self, initial_value="", newline="\n"):
        super(StringIO, self).__init__(BytesIO(),
                                       encoding="utf-8",
                                       errors="strict",
                                       newline=newline)
        # Issue #5645: make universal newlines semantics the same as in the
        # C version, even under Windows.
        if newline is None:
            self._writetranslate = False
        if initial_value is not None:
            if not isinstance(initial_value, str):
                raise TypeError("initial_value must be str or None, not {0}"
                                .format(type(initial_value).__name__))
            initial_value = str(initial_value)
            self.write(initial_value)
            self.seek(0)

    def getvalue(self):
        self.flush()
        return self.buffer.getvalue().decode(self._encoding, self._errors)

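    # Typical use (a small sketch, not part of the original module):
    #
    #   s = StringIO("hello\nworld\n")
    #   s.readline()    # 'hello\n'
    #   s.getvalue()    # 'hello\nworld\n' -- flush, then decode the
    #                   # underlying BytesIO contents
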
    def __repr__(self):
        # TextIOWrapper includes the encoding in its repr.  In StringIO,
        # that's an implementation detail.
        return object.__repr__(self)

    @property
    def errors(self):
        return None

    @property
    def encoding(self):
        return None

    def detach(self):
        # This doesn't make sense on StringIO.
        self._unsupported("detach")