"""
Python implementation of the io module.
"""

import os
import abc
import codecs
import warnings
# Import _thread instead of threading to reduce startup cost
try:
    from _thread import allocate_lock as Lock
except ImportError:
    from _dummy_thread import allocate_lock as Lock

import io
from io import __all__
from io import SEEK_SET, SEEK_CUR, SEEK_END

# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024  # bytes

# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.


class BlockingIOError(IOError):

    """Exception raised when I/O would block on a non-blocking I/O stream."""

    def __init__(self, errno, strerror, characters_written=0):
        super().__init__(errno, strerror)
        if not isinstance(characters_written, int):
            raise TypeError("characters_written must be an integer")
        self.characters_written = characters_written


def open(file: (str, bytes), mode: str = "r", buffering: int = None,
         encoding: str = None, errors: str = None,
         newline: str = None, closefd: bool = True) -> "IOBase":

    r"""Open file and return a stream. Raise IOError upon failure.

    file is either a text or byte string giving the name (and the path
    if the file isn't in the current working directory) of the file to
    be opened or an integer file descriptor of the file to be
    wrapped. (If a file descriptor is given, it is closed when the
    returned I/O object is closed, unless closefd is set to False.)

    mode is an optional string that specifies the mode in which the file
    is opened. It defaults to 'r' which means open for reading in text
    mode. Other common values are 'w' for writing (truncating the file if
    it already exists), and 'a' for appending (which on some Unix systems,
    means that all writes append to the end of the file regardless of the
    current seek position). In text mode, if encoding is not specified the
    encoding used is platform dependent. (For reading and writing raw
    bytes use binary mode and leave encoding unspecified.) The available
    modes are:

    ========= ===============================================================
    Character Meaning
    --------- ---------------------------------------------------------------
    'r'       open for reading (default)
    'w'       open for writing, truncating the file first
    'a'       open for writing, appending to the end of the file if it exists
    'b'       binary mode
    't'       text mode (default)
    '+'       open a disk file for updating (reading and writing)
    'U'       universal newline mode (for backwards compatibility; unneeded
              for new code)
    ========= ===============================================================

    The default mode is 'rt' (open for reading text). For binary random
    access, the mode 'w+b' opens and truncates the file to 0 bytes, while
    'r+b' opens the file without truncation.

    Python distinguishes between files opened in binary and text modes,
    even when the underlying operating system doesn't. Files opened in
    binary mode (appending 'b' to the mode argument) return contents as
    bytes objects without any decoding. In text mode (the default, or when
    't' is appended to the mode argument), the contents of the file are
    returned as strings, the bytes having been first decoded using a
    platform-dependent encoding or using the specified encoding if given.

    buffering is an optional integer used to set the buffering policy. By
    default full buffering is on. Pass 0 to switch buffering off (only
    allowed in binary mode), 1 to set line buffering, and an integer > 1
    for full buffering.

    encoding is the name of the encoding used to decode or encode the
    file. This should only be used in text mode. The default encoding is
    platform dependent, but any encoding supported by Python can be
    passed. See the codecs module for the list of supported encodings.

    errors is an optional string that specifies how encoding errors are to
    be handled---this argument should not be used in binary mode. Pass
    'strict' to raise a ValueError exception if there is an encoding error
    (the default of None has the same effect), or pass 'ignore' to ignore
    errors. (Note that ignoring encoding errors can lead to data loss.)
    See the documentation for codecs.register for a list of the permitted
    encoding error strings.

    newline controls how universal newlines works (it only applies to text
    mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
    follows:

    * On input, if newline is None, universal newlines mode is
      enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
      these are translated into '\n' before being returned to the
      caller. If it is '', universal newline mode is enabled, but line
      endings are returned to the caller untranslated. If it has any of
      the other legal values, input lines are only terminated by the given
      string, and the line ending is returned to the caller untranslated.

    * On output, if newline is None, any '\n' characters written are
      translated to the system default line separator, os.linesep. If
      newline is '', no translation takes place. If newline is any of the
      other legal values, any '\n' characters written are translated to
      the given string.

    If closefd is False, the underlying file descriptor will be kept open
    when the file is closed. This does not work when a file name is given
    and must be True in that case.

    open() returns a file object whose type depends on the mode, and
    through which the standard file operations such as reading and writing
    are performed. When open() is used to open a file in a text mode ('w',
    'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
    a file in a binary mode, the returned class varies: in read binary
    mode, it returns a BufferedReader; in write binary and append binary
    modes, it returns a BufferedWriter, and in read/write mode, it returns
    a BufferedRandom.

    It is also possible to use a string or bytearray as a file for both
    reading and writing. For strings StringIO can be used like a file
    opened in a text mode, and for bytes a BytesIO can be used like a file
    opened in a binary mode.
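
    For example, a minimal sketch of text and binary usage (assuming
    'spam.txt' names a writable location):

    with open('spam.txt', 'w', encoding='utf-8') as f:
        f.write('Spam and eggs!\n')       # f is a TextIOWrapper

    with open('spam.txt', 'rb') as f:
        data = f.read()                   # f is a BufferedReader; data is bytes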
    """
    if not isinstance(file, (str, bytes, int)):
        raise TypeError("invalid file: %r" % file)
    if not isinstance(mode, str):
        raise TypeError("invalid mode: %r" % mode)
    if buffering is not None and not isinstance(buffering, int):
        raise TypeError("invalid buffering: %r" % buffering)
    if encoding is not None and not isinstance(encoding, str):
        raise TypeError("invalid encoding: %r" % encoding)
    if errors is not None and not isinstance(errors, str):
        raise TypeError("invalid errors: %r" % errors)
    modes = set(mode)
    if modes - set("arwb+tU") or len(mode) > len(modes):
        raise ValueError("invalid mode: %r" % mode)
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    updating = "+" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        if writing or appending:
            raise ValueError("can't use U and writing mode at once")
        reading = True
    if text and binary:
        raise ValueError("can't have text and binary mode at once")
    if reading + writing + appending > 1:
        raise ValueError("can't have read/write/append mode at once")
    if not (reading or writing or appending):
        raise ValueError("must have exactly one of read/write/append mode")
    if binary and encoding is not None:
        raise ValueError("binary mode doesn't take an encoding argument")
    if binary and errors is not None:
        raise ValueError("binary mode doesn't take an errors argument")
    if binary and newline is not None:
        raise ValueError("binary mode doesn't take a newline argument")
    raw = FileIO(file,
                 (reading and "r" or "") +
                 (writing and "w" or "") +
                 (appending and "a" or "") +
                 (updating and "+" or ""),
                 closefd)
    if buffering is None:
        buffering = -1
    line_buffering = False
    if buffering == 1 or buffering < 0 and raw.isatty():
        buffering = -1
        line_buffering = True
    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        try:
            bs = os.fstat(raw.fileno()).st_blksize
        except (os.error, AttributeError):
            pass
        else:
            if bs > 1:
                buffering = bs
    if buffering < 0:
        raise ValueError("invalid buffering size")
    if buffering == 0:
        if binary:
            return raw
        raise ValueError("can't have unbuffered text I/O")
    if updating:
        buffer = BufferedRandom(raw, buffering)
    elif writing or appending:
        buffer = BufferedWriter(raw, buffering)
    elif reading:
        buffer = BufferedReader(raw, buffering)
    else:
        raise ValueError("unknown mode: %r" % mode)
    if binary:
        return buffer
    text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
    text.mode = mode
    return text


class DocDescriptor:
    """Helper for builtins.open.__doc__
    """
    def __get__(self, obj, typ):
        return (
            "open(file, mode='r', buffering=None, encoding=None, "
            "errors=None, newline=None, closefd=True)\n\n" +
            open.__doc__)

class OpenWrapper:
    """Wrapper for builtins.open

    Trick so that open won't become a bound method when stored
    as a class variable (as dbm.dumb does).

    See initstdio() in Python/pythonrun.c.
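
    For example, a minimal sketch (the class name is illustrative only):

    class _Database:
        _io_open = OpenWrapper   # stays a plain callable, not a bound method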
    """
    __doc__ = DocDescriptor()

    def __new__(cls, *args, **kwargs):
        return open(*args, **kwargs)


class UnsupportedOperation(ValueError, IOError):
    pass


class IOBase(metaclass=abc.ABCMeta):

    """The abstract base class for all I/O classes, acting on streams of
    bytes. There is no public constructor.

    This class provides dummy implementations for many methods that
    derived classes can override selectively; the default implementations
    represent a file that cannot be read, written or seeked.

    Even though IOBase does not declare read, readinto, or write because
    their signatures will vary, implementations and clients should
    consider those methods part of the interface. Also, implementations
    may raise an IOError when operations they do not support are called.

    The basic type used for binary data read from or written to a file is
    bytes. bytearrays are accepted too, and in some cases (such as
    readinto) needed. Text I/O classes work with str data.

    Note that calling any method (even inquiries) on a closed stream is
    undefined. Implementations may raise IOError in this case.

    IOBase (and its subclasses) support the iterator protocol, meaning
    that an IOBase object can be iterated over yielding the lines in a
    stream.

    IOBase also supports the :keyword:`with` statement. In this example,
    fp is closed after the suite of the with statement is complete:

    with open('spam.txt', 'w') as fp:
        fp.write('Spam and eggs!')
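
    Iterating over a stream yields its lines, so the file above can be
    read back with (a minimal sketch):

    with open('spam.txt') as fp:
        for line in fp:
            print(line, end='')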
    """

    ### Internal ###

    def _unsupported(self, name: str) -> IOError:
        """Internal: raise an exception for unsupported operations."""
        raise UnsupportedOperation("%s.%s() not supported" %
                                   (self.__class__.__name__, name))

    ### Positioning ###

    def seek(self, pos: int, whence: int = 0) -> int:
        """Change stream position.

        Change the stream position to byte offset offset. offset is
        interpreted relative to the position indicated by whence. Values
        for whence are:

        * 0 -- start of stream (the default); offset should be zero or positive
        * 1 -- current stream position; offset may be negative
        * 2 -- end of stream; offset is usually negative

        Return the new absolute position.
        """
        self._unsupported("seek")

    def tell(self) -> int:
        """Return current stream position."""
        return self.seek(0, 1)

    def truncate(self, pos: int = None) -> int:
        """Truncate file to size bytes.

        Size defaults to the current IO position as reported by tell(). Return
        the new size.
        """
        self._unsupported("truncate")

    ### Flush and close ###

    def flush(self) -> None:
        """Flush write buffers, if applicable.

        This is not implemented for read-only and non-blocking streams.
        """
        # XXX Should this return the number of bytes written???

    __closed = False

    def close(self) -> None:
        """Flush and close the IO object.

        This method has no effect if the file is already closed.
        """
        if not self.__closed:
            try:
                self.flush()
            except IOError:
                pass  # If flush() fails, just give up
            self.__closed = True

    def __del__(self) -> None:
        """Destructor. Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail. Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass

    ### Inquiries ###

    def seekable(self) -> bool:
        """Return whether object supports random access.

        If False, seek(), tell() and truncate() will raise IOError.
        This method may need to do a test seek().
        """
        return False

    def _checkSeekable(self, msg=None):
        """Internal: raise an IOError if file is not seekable
        """
        if not self.seekable():
            raise IOError("File or stream is not seekable."
                          if msg is None else msg)


    def readable(self) -> bool:
        """Return whether object was opened for reading.

        If False, read() will raise IOError.
        """
        return False

    def _checkReadable(self, msg=None):
        """Internal: raise an IOError if file is not readable
        """
        if not self.readable():
            raise IOError("File or stream is not readable."
                          if msg is None else msg)

    def writable(self) -> bool:
        """Return whether object was opened for writing.

        If False, write() and truncate() will raise IOError.
        """
        return False

    def _checkWritable(self, msg=None):
        """Internal: raise an IOError if file is not writable
        """
        if not self.writable():
            raise IOError("File or stream is not writable."
                          if msg is None else msg)

    @property
    def closed(self):
        """closed: bool. True iff the file has been closed.

        For backwards compatibility, this is a property, not a predicate.
        """
        return self.__closed

    def _checkClosed(self, msg=None):
        """Internal: raise a ValueError if file is closed
        """
        if self.closed:
            raise ValueError("I/O operation on closed file."
                             if msg is None else msg)

    ### Context manager ###

    def __enter__(self) -> "IOBase":  # That's a forward reference
        """Context management protocol. Returns self."""
        self._checkClosed()
        return self

    def __exit__(self, *args) -> None:
        """Context management protocol. Calls close()"""
        self.close()

    ### Lower-level APIs ###

    # XXX Should these be present even if unimplemented?

    def fileno(self) -> int:
        """Returns underlying file descriptor if one exists.

        An IOError is raised if the IO object does not use a file descriptor.
        """
        self._unsupported("fileno")

    def isatty(self) -> bool:
        """Return whether this is an 'interactive' stream.

        Return False if it can't be determined.
        """
        self._checkClosed()
        return False

    ### Readline[s] and writelines ###

    def readline(self, limit: int = -1) -> bytes:
        r"""Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.

        The line terminator is always b'\n' for binary files; for text
        files, the newlines argument to open can be used to select the line
        terminator(s) recognized.
        """
        # For backwards compatibility, a (slowish) readline().
        if hasattr(self, "peek"):
            def nreadahead():
                readahead = self.peek(1)
                if not readahead:
                    return 1
                n = (readahead.find(b"\n") + 1) or len(readahead)
                if limit >= 0:
                    n = min(n, limit)
                return n
        else:
            def nreadahead():
                return 1
        if limit is None:
            limit = -1
        elif not isinstance(limit, int):
            raise TypeError("limit must be an integer")
        res = bytearray()
        while limit < 0 or len(res) < limit:
            b = self.read(nreadahead())
            if not b:
                break
            res += b
            if res.endswith(b"\n"):
                break
        return bytes(res)

    def __iter__(self):
        self._checkClosed()
        return self

    def __next__(self):
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def readlines(self, hint=None):
        """Return a list of lines from the stream.

        hint can be specified to control the number of lines read: no more
        lines will be read if the total size (in bytes/characters) of all
        lines so far exceeds hint.
        """
        if hint is None or hint <= 0:
            return list(self)
        n = 0
        lines = []
        for line in self:
            lines.append(line)
            n += len(line)
            if n >= hint:
                break
        return lines

    def writelines(self, lines):
        self._checkClosed()
        for line in lines:
            self.write(line)

io.IOBase.register(IOBase)


class RawIOBase(IOBase):

    """Base class for raw binary I/O."""

    # The read() method is implemented by calling readinto(); derived
    # classes that want to support read() only need to implement
    # readinto() as a primitive operation. In general, readinto() can be
    # more efficient than read().

    # (It would be tempting to also provide an implementation of
    # readinto() in terms of read(), in case the latter is a more suitable
    # primitive operation, but that would lead to nasty recursion in case
    # a subclass doesn't implement either.)
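    #
    # A minimal sketch of a concrete subclass (illustrative only): it
    # implements just readable() and readinto(), and inherits read() and
    # readall() from this base class.
    #
    #     class BytesReader(RawIOBase):
    #         def __init__(self, data):
    #             self._data = bytearray(data)
    #         def readable(self):
    #             return True
    #         def readinto(self, b):
    #             n = min(len(b), len(self._data))
    #             b[:n] = self._data[:n]      # copy into the caller's buffer
    #             del self._data[:n]
    #             return n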

    def read(self, n: int = -1) -> bytes:
        """Read and return up to n bytes.

        Returns an empty bytes object on EOF, or None if the object is
        set not to block and has no data to read.
        """
        if n is None:
            n = -1
        if n < 0:
            return self.readall()
        b = bytearray(n.__index__())
        n = self.readinto(b)
        del b[n:]
        return bytes(b)

    def readall(self):
        """Read until EOF, using multiple read() calls."""
        res = bytearray()
        while True:
            data = self.read(DEFAULT_BUFFER_SIZE)
            if not data:
                break
            res += data
        return bytes(res)

    def readinto(self, b: bytearray) -> int:
        """Read up to len(b) bytes into b.

        Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
        """
        self._unsupported("readinto")

    def write(self, b: bytes) -> int:
        """Write the given buffer to the IO stream.

        Returns the number of bytes written, which may be less than len(b).
        """
        self._unsupported("write")

io.RawIOBase.register(RawIOBase)
from _io import FileIO
RawIOBase.register(FileIO)


class BufferedIOBase(IOBase):

    """Base class for buffered IO objects.

    The main difference with RawIOBase is that the read() method
    supports omitting the size argument, and does not have a default
    implementation that defers to readinto().

    In addition, read(), readinto() and write() may raise
    BlockingIOError if the underlying raw stream is in non-blocking
    mode and not ready; unlike their raw counterparts, they will never
    return None.

    A typical implementation should not inherit from a RawIOBase
    implementation, but wrap one.
    """

    def read(self, n: int = None) -> bytes:
        """Read and return up to n bytes.

        If the argument is omitted, None, or negative, reads and
        returns all data until EOF.

        If the argument is positive, and the underlying raw stream is
        not 'interactive', multiple raw reads may be issued to satisfy
        the byte count (unless EOF is reached first). But for
        interactive raw streams (XXX and for pipes?), at most one raw
        read will be issued, and a short result does not imply that
        EOF is imminent.

        Returns an empty bytes array on EOF.

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        self._unsupported("read")

    def read1(self, n: int=None) -> bytes:
        """Read up to n bytes with at most one read() system call."""
        self._unsupported("read1")

    def readinto(self, b: bytearray) -> int:
        """Read up to len(b) bytes into b.

        Like read(), this may issue multiple reads to the underlying raw
        stream, unless the latter is 'interactive'.

        Returns the number of bytes read (0 for EOF).

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        # XXX This ought to work with anything that supports the buffer API
        data = self.read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            import array
            if not isinstance(b, array.array):
                raise err
            b[:n] = array.array('b', data)
        return n

    def write(self, b: bytes) -> int:
        """Write the given buffer to the IO stream.

        Return the number of bytes written, which is never less than
        len(b).

        Raises BlockingIOError if the buffer is full and the
        underlying raw stream cannot accept more data at the moment.
        """
        self._unsupported("write")

    def detach(self) -> None:
        """
        Separate the underlying raw stream from the buffer and return it.

        After the raw stream has been detached, the buffer is in an unusable
        state.
        """
        self._unsupported("detach")

io.BufferedIOBase.register(BufferedIOBase)


class _BufferedIOMixin(BufferedIOBase):

    """A mixin implementation of BufferedIOBase with an underlying raw stream.

    This passes most requests on to the underlying raw stream. It
    does *not* provide implementations of read(), readinto() or
    write().
    """

    def __init__(self, raw):
        self.raw = raw

    ### Positioning ###

    def seek(self, pos, whence=0):
        new_position = self.raw.seek(pos, whence)
        if new_position < 0:
            raise IOError("seek() returned an invalid position")
        return new_position

    def tell(self):
        pos = self.raw.tell()
        if pos < 0:
            raise IOError("tell() returned an invalid position")
        return pos

    def truncate(self, pos=None):
        # Flush the stream. We're mixing buffered I/O with lower-level I/O,
        # and a flush may be necessary to synch both views of the current
        # file state.
        self.flush()

        if pos is None:
            pos = self.tell()
        # XXX: Should seek() be used, instead of passing the position
        # XXX  directly to truncate?
        return self.raw.truncate(pos)

    ### Flush and close ###

    def flush(self):
        self.raw.flush()

    def close(self):
        if not self.closed and self.raw is not None:
            try:
                self.flush()
            except IOError:
                pass  # If flush() fails, just give up
            self.raw.close()

    def detach(self):
        if self.raw is None:
            raise ValueError("raw stream already detached")
        self.flush()
        raw = self.raw
        self.raw = None
        return raw

    ### Inquiries ###

    def seekable(self):
        return self.raw.seekable()

    def readable(self):
        return self.raw.readable()

    def writable(self):
        return self.raw.writable()

    @property
    def closed(self):
        return self.raw.closed

    @property
    def name(self):
        return self.raw.name

    @property
    def mode(self):
        return self.raw.mode

    def __repr__(self):
        clsname = self.__class__.__name__
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.{0}>".format(clsname)
        else:
            return "<_pyio.{0} name={1!r}>".format(clsname, name)

    ### Lower-level APIs ###

    def fileno(self):
        return self.raw.fileno()

    def isatty(self):
        return self.raw.isatty()


class BytesIO(BufferedIOBase):

    """Buffered I/O implementation using an in-memory bytes buffer."""

    def __init__(self, initial_bytes=None):
        buf = bytearray()
        if initial_bytes is not None:
            buf += initial_bytes
        self._buffer = buf
        self._pos = 0

    def __getstate__(self):
        if self.closed:
            raise ValueError("__getstate__ on closed file")
        return self.__dict__.copy()

    def getvalue(self):
        """Return the bytes value (contents) of the buffer
        """
        if self.closed:
            raise ValueError("getvalue on closed file")
        return bytes(self._buffer)

    def read(self, n=None):
        if self.closed:
            raise ValueError("read from closed file")
        if n is None:
            n = -1
        if n < 0:
            n = len(self._buffer)
        if len(self._buffer) <= self._pos:
            return b""
        newpos = min(len(self._buffer), self._pos + n)
        b = self._buffer[self._pos : newpos]
        self._pos = newpos
        return bytes(b)

    def read1(self, n):
        """This is the same as read.
        """
        return self.read(n)

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        n = len(b)
        if n == 0:
            return 0
        pos = self._pos
        if pos > len(self._buffer):
            # Inserts null bytes between the current end of the file
            # and the new write position.
            padding = b'\x00' * (pos - len(self._buffer))
            self._buffer += padding
        self._buffer[pos:pos + n] = b
        self._pos += n
        return n

    def seek(self, pos, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        try:
            pos = pos.__index__()
        except AttributeError as err:
            raise TypeError("an integer is required") from err
        if whence == 0:
            if pos < 0:
                raise ValueError("negative seek position %r" % (pos,))
            self._pos = pos
        elif whence == 1:
            self._pos = max(0, self._pos + pos)
        elif whence == 2:
            self._pos = max(0, len(self._buffer) + pos)
        else:
            raise ValueError("invalid whence value")
        return self._pos

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file")
        return self._pos

    def truncate(self, pos=None):
        if self.closed:
            raise ValueError("truncate on closed file")
        if pos is None:
            pos = self._pos
        elif pos < 0:
            raise ValueError("negative truncate position %r" % (pos,))
        del self._buffer[pos:]
        return self.seek(pos)

    def readable(self):
        return True

    def writable(self):
        return True

    def seekable(self):
        return True


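# A quick sketch of BytesIO usage (illustrative): an in-memory stream that
# supports the full buffered binary API.
#
#     b = BytesIO(b"abc")
#     b.seek(0, 2)             # jump to the end of the buffer
#     b.write(b"def")
#     b.getvalue()             # -> b"abcdef"
#     b.seek(0)
#     b.read(3)                # -> b"abc"
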
class BufferedReader(_BufferedIOMixin):

    """BufferedReader(raw[, buffer_size])

    A buffer for a readable, sequential RawIOBase object.

    The constructor creates a BufferedReader for the given readable raw
    stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
    is used.
    """

    def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
        """Create a new buffered reader using the given readable raw IO object.
        """
        if not raw.readable():
            raise IOError('"raw" argument must be readable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        self.buffer_size = buffer_size
        self._reset_read_buf()
        self._read_lock = Lock()

    def _reset_read_buf(self):
        self._read_buf = b""
        self._read_pos = 0

    def read(self, n=None):
        """Read n bytes.

        Returns exactly n bytes of data unless the underlying raw IO
        stream reaches EOF or if the call would block in non-blocking
        mode. If n is negative, read until EOF or until read() would
        block.
        """
        if n is not None and n < -1:
            raise ValueError("invalid number of bytes to read")
        with self._read_lock:
            return self._read_unlocked(n)

    def _read_unlocked(self, n=None):
        nodata_val = b""
        empty_values = (b"", None)
        buf = self._read_buf
        pos = self._read_pos

        # Special case for when the number of bytes to read is unspecified.
        if n is None or n == -1:
            self._reset_read_buf()
            chunks = [buf[pos:]]  # Strip the consumed bytes.
            current_size = 0
            while True:
                # Read until EOF or until read() would block.
                chunk = self.raw.read()
                if chunk in empty_values:
                    nodata_val = chunk
                    break
                current_size += len(chunk)
                chunks.append(chunk)
            return b"".join(chunks) or nodata_val

        # The number of bytes to read is specified, return at most n bytes.
        avail = len(buf) - pos  # Length of the available buffered data.
        if n <= avail:
            # Fast path: the data to read is fully buffered.
            self._read_pos += n
            return buf[pos:pos+n]
        # Slow path: read from the stream until enough bytes are read,
        # or until an EOF occurs or until read() would block.
        chunks = [buf[pos:]]
        wanted = max(self.buffer_size, n)
        while avail < n:
            chunk = self.raw.read(wanted)
            if chunk in empty_values:
                nodata_val = chunk
                break
            avail += len(chunk)
            chunks.append(chunk)
        # n is more than avail only when an EOF occurred or when
        # read() would have blocked.
        n = min(n, avail)
        out = b"".join(chunks)
        self._read_buf = out[n:]  # Save the extra data in the buffer.
        self._read_pos = 0
        return out[:n] if out else nodata_val

    def peek(self, n=0):
        """Returns buffered bytes without advancing the position.

        The argument indicates a desired minimal number of bytes; we
        do at most one raw read to satisfy it. We never return more
        than self.buffer_size.
        """
        with self._read_lock:
            return self._peek_unlocked(n)

    def _peek_unlocked(self, n=0):
        want = min(n, self.buffer_size)
        have = len(self._read_buf) - self._read_pos
        if have < want or have <= 0:
            to_read = self.buffer_size - have
            current = self.raw.read(to_read)
            if current:
                self._read_buf = self._read_buf[self._read_pos:] + current
                self._read_pos = 0
        return self._read_buf[self._read_pos:]

    def read1(self, n):
        """Reads up to n bytes, with at most one read() system call."""
        # Returns up to n bytes. If at least one byte is buffered, we
        # only return buffered bytes. Otherwise, we do one raw read.
        if n < 0:
            raise ValueError("number of bytes to read must be positive")
        if n == 0:
            return b""
        with self._read_lock:
            self._peek_unlocked(1)
            return self._read_unlocked(
                min(n, len(self._read_buf) - self._read_pos))

    def tell(self):
        return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence value")
        with self._read_lock:
            if whence == 1:
                pos -= len(self._read_buf) - self._read_pos
            pos = _BufferedIOMixin.seek(self, pos, whence)
            self._reset_read_buf()
            return pos

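# A short sketch of BufferedReader behaviour (illustrative; BytesIO stands in
# for a raw stream here):
#
#     r = BufferedReader(BytesIO(b"hello world"))
#     r.peek(1)      # returns buffered bytes without consuming them
#     r.read1(5)     # -> b"hello"  (at most one raw read)
#     r.read()       # -> b" world" (rest of the stream)
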
class BufferedWriter(_BufferedIOMixin):

    """A buffer for a writeable sequential RawIO object.

    The constructor creates a BufferedWriter for the given writeable raw
    stream. If the buffer_size is not given, it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 2

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        if not raw.writable():
            raise IOError('"raw" argument must be writable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                          self._warning_stack_offset)
        self.buffer_size = buffer_size
        self._write_buf = bytearray()
        self._write_lock = Lock()

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        with self._write_lock:
            # XXX we can implement some more tricks to try and avoid
            # partial writes
            if len(self._write_buf) > self.buffer_size:
                # We're full, so let's pre-flush the buffer
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    # We can't accept anything else.
                    # XXX Why not just let the exception pass through?
                    raise BlockingIOError(e.errno, e.strerror, 0)
            before = len(self._write_buf)
            self._write_buf.extend(b)
            written = len(self._write_buf) - before
            if len(self._write_buf) > self.buffer_size:
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    if len(self._write_buf) > self.buffer_size:
                        # We've hit the buffer_size. We have to accept a partial
                        # write and cut back our buffer.
                        overage = len(self._write_buf) - self.buffer_size
                        written -= overage
                        self._write_buf = self._write_buf[:self.buffer_size]
                    raise BlockingIOError(e.errno, e.strerror, written)
            return written

    def truncate(self, pos=None):
        with self._write_lock:
            self._flush_unlocked()
            if pos is None:
                pos = self.raw.tell()
            return self.raw.truncate(pos)

    def flush(self):
        with self._write_lock:
            self._flush_unlocked()

    def _flush_unlocked(self):
        if self.closed:
            raise ValueError("flush of closed file")
        written = 0
        try:
            while self._write_buf:
                n = self.raw.write(self._write_buf)
                if n > len(self._write_buf) or n < 0:
                    raise IOError("write() returned incorrect number of bytes")
                del self._write_buf[:n]
                written += n
        except BlockingIOError as e:
            n = e.characters_written
            del self._write_buf[:n]
            written += n
            raise BlockingIOError(e.errno, e.strerror, written)

    def tell(self):
        return _BufferedIOMixin.tell(self) + len(self._write_buf)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        with self._write_lock:
            self._flush_unlocked()
            return _BufferedIOMixin.seek(self, pos, whence)


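# A short sketch of BufferedWriter behaviour (illustrative; BytesIO stands in
# for a raw stream): writes accumulate in an internal bytearray and reach the
# raw stream only when the buffer overflows or on flush()/close().
#
#     raw = BytesIO()
#     w = BufferedWriter(raw, buffer_size=8)
#     w.write(b"abc")          # still only in the write buffer
#     raw.getvalue()           # -> b""
#     w.flush()
#     raw.getvalue()           # -> b"abc"
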
class BufferedRWPair(BufferedIOBase):

    """A buffered reader and writer object together.

    A buffered reader object and buffered writer object put together to
    form a sequential IO object that can read and write. This is typically
    used with a socket or two-way pipe.

    reader and writer are RawIOBase objects that are readable and
    writeable respectively. If the buffer_size is omitted it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    # XXX The usefulness of this (compared to having two separate IO
    # objects) is questionable.

    def __init__(self, reader, writer,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        """Constructor.

        The arguments are two RawIO instances.
        """
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)

        if not reader.readable():
            raise IOError('"reader" argument must be readable.')

        if not writer.writable():
            raise IOError('"writer" argument must be writable.')

        self.reader = BufferedReader(reader, buffer_size)
        self.writer = BufferedWriter(writer, buffer_size)

    def read(self, n=None):
        if n is None:
            n = -1
        return self.reader.read(n)

    def readinto(self, b):
        return self.reader.readinto(b)

    def write(self, b):
        return self.writer.write(b)

    def peek(self, n=0):
        return self.reader.peek(n)

    def read1(self, n):
        return self.reader.read1(n)

    def readable(self):
        return self.reader.readable()

    def writable(self):
        return self.writer.writable()

    def flush(self):
        return self.writer.flush()

    def close(self):
        self.writer.close()
        self.reader.close()

    def isatty(self):
        return self.reader.isatty() or self.writer.isatty()

    @property
    def closed(self):
        return self.writer.closed


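# A minimal sketch of pairing the two ends of a pipe into one object
# (illustrative only):
#
#     import os
#     r, w = os.pipe()
#     pair = BufferedRWPair(FileIO(r, "r"), FileIO(w, "w"))
#     pair.write(b"ping")
#     pair.flush()
#     pair.read(4)             # -> b"ping"
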
class BufferedRandom(BufferedWriter, BufferedReader):

    """A buffered interface to random access streams.

    The constructor creates a reader and writer for a seekable stream,
    raw, given in the first argument. If the buffer_size is omitted it
    defaults to DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 3

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        raw._checkSeekable()
        BufferedReader.__init__(self, raw, buffer_size)
        BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        self.flush()
        if self._read_buf:
            # Undo read ahead.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
        # First do the raw seek, then empty the read buffer, so that
        # if the raw seek fails, we don't lose buffered data forever.
        pos = self.raw.seek(pos, whence)
        with self._read_lock:
            self._reset_read_buf()
        if pos < 0:
            raise IOError("seek() returned invalid position")
        return pos

    def tell(self):
        if self._write_buf:
            return BufferedWriter.tell(self)
        else:
            return BufferedReader.tell(self)

    def truncate(self, pos=None):
        if pos is None:
            pos = self.tell()
        # Use seek to flush the read buffer.
        self.seek(pos)
        return BufferedWriter.truncate(self)

    def read(self, n=None):
        if n is None:
            n = -1
        self.flush()
        return BufferedReader.read(self, n)

    def readinto(self, b):
        self.flush()
        return BufferedReader.readinto(self, b)

    def peek(self, n=0):
        self.flush()
        return BufferedReader.peek(self, n)

    def read1(self, n):
        self.flush()
        return BufferedReader.read1(self, n)

    def write(self, b):
        if self._read_buf:
            # Undo readahead
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
                self._reset_read_buf()
        return BufferedWriter.write(self, b)


class TextIOBase(IOBase):

    """Base class for text I/O.

    This class provides a character and line based interface to stream
    I/O. There is no readinto method because Python's character strings
    are immutable. There is no public constructor.
    """

    def read(self, n: int = -1) -> str:
        """Read at most n characters from stream.

        Read from underlying buffer until we have n characters or we hit EOF.
        If n is negative or omitted, read until EOF.
        """
        self._unsupported("read")

    def write(self, s: str) -> int:
        """Write string s to stream."""
        self._unsupported("write")

    def truncate(self, pos: int = None) -> int:
        """Truncate size to pos."""
        self._unsupported("truncate")

    def readline(self) -> str:
        """Read until newline or EOF.

        Returns an empty string if EOF is hit immediately.
        """
        self._unsupported("readline")

    def detach(self) -> None:
        """
        Separate the underlying buffer from the TextIOBase and return it.

        After the underlying buffer has been detached, the TextIO is in an
        unusable state.
        """
        self._unsupported("detach")

    @property
    def encoding(self):
        """Subclasses should override."""
        return None

    @property
    def newlines(self):
        """Line endings translated so far.

        Only line endings translated during reading are considered.

        Subclasses should override.
        """
        return None

    @property
    def errors(self):
        """Error setting of the decoder or encoder.

        Subclasses should override."""
        return None

io.TextIOBase.register(TextIOBase)


class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
    r"""Codec used when reading a file in universal newlines mode. It wraps
    another incremental decoder, translating \r\n and \r into \n. It also
    records the types of newlines encountered. When used with
    translate=False, it ensures that the newline sequence is returned in
    one piece.
    """
    def __init__(self, decoder, translate, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors=errors)
        self.translate = translate
        self.decoder = decoder
        self.seennl = 0
        self.pendingcr = False

    def decode(self, input, final=False):
        # decode input (with the eventual \r from a previous pass)
        if self.decoder is None:
            output = input
        else:
            output = self.decoder.decode(input, final=final)
        if self.pendingcr and (output or final):
            output = "\r" + output
            self.pendingcr = False

        # retain last \r even when not translating data:
        # then readline() is sure to get \r\n in one pass
        if output.endswith("\r") and not final:
            output = output[:-1]
            self.pendingcr = True

        # Record which newlines are read
        crlf = output.count('\r\n')
        cr = output.count('\r') - crlf
        lf = output.count('\n') - crlf
        self.seennl |= (lf and self._LF) | (cr and self._CR) \
                    | (crlf and self._CRLF)

        if self.translate:
            if crlf:
                output = output.replace("\r\n", "\n")
            if cr:
                output = output.replace("\r", "\n")

        return output

    def getstate(self):
        if self.decoder is None:
            buf = b""
            flag = 0
        else:
            buf, flag = self.decoder.getstate()
        flag <<= 1
        if self.pendingcr:
            flag |= 1
        return buf, flag

    def setstate(self, state):
        buf, flag = state
        self.pendingcr = bool(flag & 1)
        if self.decoder is not None:
            self.decoder.setstate((buf, flag >> 1))

    def reset(self):
        self.seennl = 0
        self.pendingcr = False
        if self.decoder is not None:
            self.decoder.reset()

    _LF = 1
    _CR = 2
    _CRLF = 4

    @property
    def newlines(self):
        return (None,
                "\n",
                "\r",
                ("\r", "\n"),
                "\r\n",
                ("\n", "\r\n"),
                ("\r", "\r\n"),
                ("\r", "\n", "\r\n")
               )[self.seennl]


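# A tiny sketch of the decoder at work (illustrative): data split across two
# chunks still yields the "\r\n" pair as a single "\n", and the kinds of line
# endings seen are recorded.
#
#     dec = IncrementalNewlineDecoder(None, translate=True)
#     dec.decode("a\r")        # -> "a"     (the trailing "\r" is held back)
#     dec.decode("\nb")        # -> "\nb"   ("\r\n" collapsed to "\n")
#     dec.newlines             # -> "\r\n"
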
class TextIOWrapper(TextIOBase):

    r"""Character and line based layer over a BufferedIOBase object, buffer.

    encoding gives the name of the encoding that the stream will be
    decoded or encoded with. It defaults to locale.getpreferredencoding.

    errors determines the strictness of encoding and decoding (see the
    codecs.register) and defaults to "strict".

    newline can be None, '', '\n', '\r', or '\r\n'. It controls the
    handling of line endings. If it is None, universal newlines is
    enabled. With this enabled, on input, the line endings '\n', '\r',
    or '\r\n' are translated to '\n' before being returned to the
    caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any other of its
    legal values, that newline becomes the newline when the file is read
    and it is returned untranslated. On output, '\n' is converted to the
    newline.

    If line_buffering is True, a call to flush is implied when a call to
    write contains a newline character.
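
    For example (an illustrative sketch), wrapping an in-memory binary
    buffer:

    t = TextIOWrapper(BytesIO(), encoding="utf-8", newline="\n")
    t.write("spam\n")
    t.seek(0)
    t.read()    # -> 'spam\n'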
    """

    _CHUNK_SIZE = 2048

    def __init__(self, buffer, encoding=None, errors=None, newline=None,
                 line_buffering=False):
        if newline is not None and not isinstance(newline, str):
            raise TypeError("illegal newline type: %r" % (type(newline),))
        if newline not in (None, "", "\n", "\r", "\r\n"):
            raise ValueError("illegal newline value: %r" % (newline,))
        if encoding is None:
            try:
                encoding = os.device_encoding(buffer.fileno())
            except (AttributeError, UnsupportedOperation):
                pass
            if encoding is None:
                try:
                    import locale
                except ImportError:
                    # Importing locale may fail if Python is being built
                    encoding = "ascii"
                else:
                    encoding = locale.getpreferredencoding()

        if not isinstance(encoding, str):
            raise ValueError("invalid encoding: %r" % encoding)

        if errors is None:
            errors = "strict"
        else:
            if not isinstance(errors, str):
                raise ValueError("invalid errors: %r" % errors)

        self.buffer = buffer
        self._line_buffering = line_buffering
        self._encoding = encoding
        self._errors = errors
        self._readuniversal = not newline
        self._readtranslate = newline is None
        self._readnl = newline
        self._writetranslate = newline != ''
        self._writenl = newline or os.linesep
        self._encoder = None
        self._decoder = None
        self._decoded_chars = ''  # buffer for text returned from decoder
        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
        self._snapshot = None  # info for reconstructing decoder state
        self._seekable = self._telling = self.buffer.seekable()

        if self._seekable and self.writable():
            position = self.buffer.tell()
            if position != 0:
                try:
                    self._get_encoder().setstate(0)
                except LookupError:
                    # Sometimes the encoder doesn't exist
                    pass

    # self._snapshot is either None, or a tuple (dec_flags, next_input)
    # where dec_flags is the second (integer) item of the decoder state
    # and next_input is the chunk of input bytes that comes next after the
    # snapshot point. We use this to reconstruct decoder states in tell().

    # Naming convention:
    # - "bytes_..." for integer variables that count input bytes
    # - "chars_..." for integer variables that count decoded characters

    def __repr__(self):
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.TextIOWrapper encoding={0!r}>".format(self.encoding)
        else:
            return "<_pyio.TextIOWrapper name={0!r} encoding={1!r}>".format(
                name, self.encoding)

    @property
    def encoding(self):
        return self._encoding

    @property
    def errors(self):
        return self._errors

    @property
    def line_buffering(self):
        return self._line_buffering

    def seekable(self):
        return self._seekable

    def readable(self):
        return self.buffer.readable()

    def writable(self):
        return self.buffer.writable()

    def flush(self):
        self.buffer.flush()
        self._telling = self._seekable

    def close(self):
        if self.buffer is not None:
            try:
                self.flush()
            except IOError:
                pass  # If flush() fails, just give up
            self.buffer.close()

    @property
    def closed(self):
        return self.buffer.closed

    @property
    def name(self):
        return self.buffer.name

    def fileno(self):
        return self.buffer.fileno()

    def isatty(self):
        return self.buffer.isatty()

    def write(self, s: str):
        if self.closed:
            raise ValueError("write to closed file")
        if not isinstance(s, str):
            raise TypeError("can't write %s to text stream" %
                            s.__class__.__name__)
        length = len(s)
        haslf = (self._writetranslate or self._line_buffering) and "\n" in s
        if haslf and self._writetranslate and self._writenl != "\n":
            s = s.replace("\n", self._writenl)
        encoder = self._encoder or self._get_encoder()
        # XXX What if we were just reading?
        b = encoder.encode(s)
        self.buffer.write(b)
        if self._line_buffering and (haslf or "\r" in s):
            self.flush()
        self._snapshot = None
        if self._decoder:
            self._decoder.reset()
        return length

    def _get_encoder(self):
        make_encoder = codecs.getincrementalencoder(self._encoding)
        self._encoder = make_encoder(self._errors)
        return self._encoder

    def _get_decoder(self):
        make_decoder = codecs.getincrementaldecoder(self._encoding)
        decoder = make_decoder(self._errors)
        if self._readuniversal:
            decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
        self._decoder = decoder
        return decoder

    # The following three methods implement an ADT for _decoded_chars.
    # Text returned from the decoder is buffered here until the client
    # requests it by calling our read() or readline() method.
    def _set_decoded_chars(self, chars):
        """Set the _decoded_chars buffer."""
        self._decoded_chars = chars
        self._decoded_chars_used = 0

    def _get_decoded_chars(self, n=None):
        """Advance into the _decoded_chars buffer."""
        offset = self._decoded_chars_used
        if n is None:
            chars = self._decoded_chars[offset:]
        else:
            chars = self._decoded_chars[offset:offset + n]
        self._decoded_chars_used += len(chars)
        return chars

    def _rewind_decoded_chars(self, n):
        """Rewind the _decoded_chars buffer."""
        if self._decoded_chars_used < n:
            raise AssertionError("rewind decoded_chars out of bounds")
        self._decoded_chars_used -= n

    def _read_chunk(self):
        """
        Read and decode the next chunk of data from the BufferedReader.
        """

        # The return value is True unless EOF was reached. The decoded
        # string is placed in self._decoded_chars (replacing its previous
        # value). The entire input chunk is sent to the decoder, though
        # some of it may remain buffered in the decoder, yet to be
        # converted.

        if self._decoder is None:
            raise ValueError("no decoder")

        if self._telling:
            # To prepare for tell(), we need to snapshot a point in the
            # file where the decoder's input buffer is empty.

            dec_buffer, dec_flags = self._decoder.getstate()
            # Given this, we know there was a valid snapshot point
            # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).

        # Read a chunk, decode it, and put the result in self._decoded_chars.
        input_chunk = self.buffer.read1(self._CHUNK_SIZE)
        eof = not input_chunk
        self._set_decoded_chars(self._decoder.decode(input_chunk, eof))

        if self._telling:
            # At the snapshot point, len(dec_buffer) bytes before the read,
            # the next input to be decoded is dec_buffer + input_chunk.
            self._snapshot = (dec_flags, dec_buffer + input_chunk)

        return not eof

    def _pack_cookie(self, position, dec_flags=0,
                           bytes_to_feed=0, need_eof=0, chars_to_skip=0):
        # The meaning of a tell() cookie is: seek to position, set the
        # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
        # into the decoder with need_eof as the EOF flag, then skip
        # chars_to_skip characters of the decoded result. For most simple
        # decoders, tell() will often just give a byte offset in the file.
        return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
                (chars_to_skip<<192) | bool(need_eof)<<256)

    def _unpack_cookie(self, bigint):
        rest, position = divmod(bigint, 1<<64)
        rest, dec_flags = divmod(rest, 1<<64)
        rest, bytes_to_feed = divmod(rest, 1<<64)
        need_eof, chars_to_skip = divmod(rest, 1<<64)
        return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
1643 def tell(self):
1644 if not self._seekable:
1645 raise IOError("underlying stream is not seekable")
1646 if not self._telling:
1647 raise IOError("telling position disabled by next() call")
1648 self.flush()
1649 position = self.buffer.tell()
1650 decoder = self._decoder
1651 if decoder is None or self._snapshot is None:
1652 if self._decoded_chars:
1653 # This should never happen.
1654 raise AssertionError("pending decoded text")
1655 return position
1656
1657 # Skip backward to the snapshot point (see _read_chunk).
1658 dec_flags, next_input = self._snapshot
1659 position -= len(next_input)
1660
1661 # How many decoded characters have been used up since the snapshot?
1662 chars_to_skip = self._decoded_chars_used
1663 if chars_to_skip == 0:
1664 # We haven't moved from the snapshot point.
1665 return self._pack_cookie(position, dec_flags)
1666
1667 # Starting from the snapshot position, we will walk the decoder
1668 # forward until it gives us enough decoded characters.
1669 saved_state = decoder.getstate()
1670 try:
1671 # Note our initial start point.
1672 decoder.setstate((b'', dec_flags))
1673 start_pos = position
1674 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1675 need_eof = 0
1676
1677 # Feed the decoder one byte at a time. As we go, note the
1678 # nearest "safe start point" before the current location
1679 # (a point where the decoder has nothing buffered, so seek()
1680 # can safely start from there and advance to this location).
1681 next_byte = bytearray(1)
1682 for next_byte[0] in next_input:
1683 bytes_fed += 1
1684 chars_decoded += len(decoder.decode(next_byte))
1685 dec_buffer, dec_flags = decoder.getstate()
1686 if not dec_buffer and chars_decoded <= chars_to_skip:
1687 # Decoder buffer is empty, so this is a safe start point.
1688 start_pos += bytes_fed
1689 chars_to_skip -= chars_decoded
1690 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1691 if chars_decoded >= chars_to_skip:
1692 break
1693 else:
1694 # We didn't get enough decoded data; signal EOF to get more.
1695 chars_decoded += len(decoder.decode(b'', final=True))
1696 need_eof = 1
1697 if chars_decoded < chars_to_skip:
1698 raise IOError("can't reconstruct logical file position")
1699
1700 # The returned cookie corresponds to the last safe start point.
1701 return self._pack_cookie(
1702 start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
1703 finally:
1704 decoder.setstate(saved_state)
1705
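    # Usage sketch (illustrative; written against the public io module, whose
    # TextIOWrapper behaves the same way for tell() and seek()): the cookie
    # returned by tell() is opaque -- it is not a character count -- but
    # feeding it back to seek() restores the exact position.
    #
    #   import io
    #   f = io.TextIOWrapper(io.BytesIO("héllo\nwörld\n".encode("utf-8")),
    #                        encoding="utf-8")
    #   f.readline()        # -> 'héllo\n'
    #   pos = f.tell()      # opaque cookie
    #   f.readline()        # -> 'wörld\n'
    #   f.seek(pos)
    #   f.readline()        # -> 'wörld\n' again
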
1706 def truncate(self, pos=None):
1707 self.flush()
1708 if pos is None:
1709 pos = self.tell()
1710 self.seek(pos)
1711 return self.buffer.truncate()
1712
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001713 def detach(self):
1714 if self.buffer is None:
1715 raise ValueError("buffer is already detached")
1716 self.flush()
1717 buffer = self.buffer
1718 self.buffer = None
1719 return buffer
1720
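    # Usage sketch for detach() (illustrative): the caller takes ownership of
    # the underlying binary buffer and the text wrapper must not be used
    # afterwards.
    #
    #   import io
    #   raw = io.BytesIO(b"abc")
    #   f = io.TextIOWrapper(raw, encoding="ascii")
    #   f.detach() is raw   # -> True
    #   raw.read()          # -> b'abc'
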
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001721 def seek(self, cookie, whence=0):
1722 if self.closed:
1723            raise ValueError("seek on closed file")
1724 if not self._seekable:
1725 raise IOError("underlying stream is not seekable")
1726 if whence == 1: # seek relative to current position
1727 if cookie != 0:
1728 raise IOError("can't do nonzero cur-relative seeks")
1729 # Seeking to the current position should attempt to
1730 # sync the underlying buffer with the current position.
1731 whence = 0
1732 cookie = self.tell()
1733 if whence == 2: # seek relative to end of file
1734 if cookie != 0:
1735 raise IOError("can't do nonzero end-relative seeks")
1736 self.flush()
1737 position = self.buffer.seek(0, 2)
1738 self._set_decoded_chars('')
1739 self._snapshot = None
1740 if self._decoder:
1741 self._decoder.reset()
1742 return position
1743 if whence != 0:
1744 raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
1745 (whence,))
1746 if cookie < 0:
1747 raise ValueError("negative seek position %r" % (cookie,))
1748 self.flush()
1749
1750 # The strategy of seek() is to go back to the safe start point
1751 # and replay the effect of read(chars_to_skip) from there.
1752 start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
1753 self._unpack_cookie(cookie)
1754
1755 # Seek back to the safe start point.
1756 self.buffer.seek(start_pos)
1757 self._set_decoded_chars('')
1758 self._snapshot = None
1759
1760 # Restore the decoder to its state from the safe start point.
Benjamin Peterson9363a652009-03-05 00:42:09 +00001761 if cookie == 0 and self._decoder:
1762 self._decoder.reset()
1763 elif self._decoder or dec_flags or chars_to_skip:
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001764 self._decoder = self._decoder or self._get_decoder()
1765 self._decoder.setstate((b'', dec_flags))
1766 self._snapshot = (dec_flags, b'')
1767
1768 if chars_to_skip:
1769 # Just like _read_chunk, feed the decoder and save a snapshot.
1770 input_chunk = self.buffer.read(bytes_to_feed)
1771 self._set_decoded_chars(
1772 self._decoder.decode(input_chunk, need_eof))
1773 self._snapshot = (dec_flags, input_chunk)
1774
1775 # Skip chars_to_skip of the decoded characters.
1776 if len(self._decoded_chars) < chars_to_skip:
1777 raise IOError("can't restore logical file position")
1778 self._decoded_chars_used = chars_to_skip
1779
Antoine Pitroue4501852009-05-14 18:55:55 +00001780        # Finally, reset the encoder (only needed for proper BOM handling)
1781 try:
1782 encoder = self._encoder or self._get_encoder()
1783 except LookupError:
1784            # The encoding may not provide an incremental encoder.
1785 pass
1786 else:
1787 if cookie != 0:
1788 encoder.setstate(0)
1789 else:
1790 encoder.reset()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001791 return cookie
1792
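    # Behavioural sketch (illustrative, following the checks above in this
    # implementation): text-mode seek() only accepts cookies previously
    # returned by tell(), plus the special forms seek(0, 1) (sync to the
    # current position) and seek(0, 2) (jump to end of file).
    #
    #   import io
    #   f = io.TextIOWrapper(io.BytesIO(b"data"), encoding="ascii")
    #   f.seek(0, 2)        # -> 4, positioned at end of file
    #   f.seek(1, 1)        # raises IOError ("can't do nonzero cur-relative
    #                       #                  seeks")
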
1793 def read(self, n=None):
Benjamin Petersona1b49012009-03-31 23:11:32 +00001794 self._checkReadable()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001795 if n is None:
1796 n = -1
1797 decoder = self._decoder or self._get_decoder()
1798 if n < 0:
1799 # Read everything.
1800 result = (self._get_decoded_chars() +
1801 decoder.decode(self.buffer.read(), final=True))
1802 self._set_decoded_chars('')
1803 self._snapshot = None
1804 return result
1805 else:
1806 # Keep reading chunks until we have n characters to return.
1807 eof = False
1808 result = self._get_decoded_chars(n)
1809 while len(result) < n and not eof:
1810 eof = not self._read_chunk()
1811 result += self._get_decoded_chars(n - len(result))
1812 return result
1813
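    # Sketch (illustrative): read(n) returns at most n decoded *characters*;
    # the number of bytes consumed from the underlying buffer depends on the
    # encoding.
    #
    #   import io
    #   f = io.TextIOWrapper(io.BytesIO("héllo".encode("utf-8")),
    #                        encoding="utf-8")
    #   f.read(2)    # -> 'hé' (two characters spanning three encoded bytes)
    #   f.read()     # -> 'llo'
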
1814 def __next__(self):
1815 self._telling = False
1816 line = self.readline()
1817 if not line:
1818 self._snapshot = None
1819 self._telling = self._seekable
1820 raise StopIteration
1821 return line
1822
1823 def readline(self, limit=None):
1824 if self.closed:
1825 raise ValueError("read from closed file")
1826 if limit is None:
1827 limit = -1
Benjamin Petersonb01138a2009-04-24 22:59:52 +00001828 elif not isinstance(limit, int):
1829 raise TypeError("limit must be an integer")
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001830
1831 # Grab all the decoded text (we will rewind any extra bits later).
1832 line = self._get_decoded_chars()
1833
1834 start = 0
1835 # Make the decoder if it doesn't already exist.
1836 if not self._decoder:
1837 self._get_decoder()
1838
1839 pos = endpos = None
1840 while True:
1841 if self._readtranslate:
1842 # Newlines are already translated, only search for \n
1843 pos = line.find('\n', start)
1844 if pos >= 0:
1845 endpos = pos + 1
1846 break
1847 else:
1848 start = len(line)
1849
1850 elif self._readuniversal:
1851 # Universal newline search. Find any of \r, \r\n, \n
1852 # The decoder ensures that \r\n are not split in two pieces
1853                # The decoder ensures that a \r\n pair is never split across chunks
1854 # In C we'd look for these in parallel of course.
1855 nlpos = line.find("\n", start)
1856 crpos = line.find("\r", start)
1857 if crpos == -1:
1858 if nlpos == -1:
1859 # Nothing found
1860 start = len(line)
1861 else:
1862 # Found \n
1863 endpos = nlpos + 1
1864 break
1865 elif nlpos == -1:
1866 # Found lone \r
1867 endpos = crpos + 1
1868 break
1869 elif nlpos < crpos:
1870 # Found \n
1871 endpos = nlpos + 1
1872 break
1873 elif nlpos == crpos + 1:
1874 # Found \r\n
1875 endpos = crpos + 2
1876 break
1877 else:
1878 # Found \r
1879 endpos = crpos + 1
1880 break
1881 else:
1882 # non-universal
1883 pos = line.find(self._readnl)
1884 if pos >= 0:
1885 endpos = pos + len(self._readnl)
1886 break
1887
1888 if limit >= 0 and len(line) >= limit:
1889 endpos = limit # reached length limit
1890 break
1891
1892            # No line ending seen yet - get more data
1893 while self._read_chunk():
1894 if self._decoded_chars:
1895 break
1896 if self._decoded_chars:
1897 line += self._get_decoded_chars()
1898 else:
1899 # end of file
1900 self._set_decoded_chars('')
1901 self._snapshot = None
1902 return line
1903
1904 if limit >= 0 and endpos > limit:
1905 endpos = limit # don't exceed limit
1906
1907 # Rewind _decoded_chars to just after the line ending we found.
1908 self._rewind_decoded_chars(len(line) - endpos)
1909 return line[:endpos]
1910
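    # Sketch of the newline handling above (illustrative): with newline=''
    # universal-newline detection is on but translation is off, so readline()
    # returns each line with its original terminator.
    #
    #   import io
    #   f = io.TextIOWrapper(io.BytesIO(b"a\rb\r\nc\n"), encoding="ascii",
    #                        newline="")
    #   [f.readline() for _ in range(3)]   # -> ['a\r', 'b\r\n', 'c\n']
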
1911 @property
1912 def newlines(self):
1913 return self._decoder.newlines if self._decoder else None
1914
1915
1916class StringIO(TextIOWrapper):
1917 """Text I/O implementation using an in-memory buffer.
1918
1919    The initial_value argument sets the value of the object. The newline
1920    argument works like that of TextIOWrapper's constructor.
1921 """
1922
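    # Usage sketch (illustrative): the initial value is written and the
    # stream is rewound, so reads start at the beginning, while writes made
    # after reading append at the current position.
    #
    #   s = StringIO("hello\n")
    #   s.read()        # -> 'hello\n'
    #   s.write("world")
    #   s.getvalue()    # -> 'hello\nworld'
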
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001923 def __init__(self, initial_value="", newline="\n"):
1924 super(StringIO, self).__init__(BytesIO(),
1925 encoding="utf-8",
1926 errors="strict",
1927 newline=newline)
Antoine Pitrou11446482009-04-04 14:09:30 +00001928 # Issue #5645: make universal newlines semantics the same as in the
1929 # C version, even under Windows.
1930 if newline is None:
1931 self._writetranslate = False
Alexandre Vassalottid2bb18b2009-07-22 03:07:33 +00001932 if initial_value is not None:
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001933 if not isinstance(initial_value, str):
Alexandre Vassalottid2bb18b2009-07-22 03:07:33 +00001934 raise TypeError("initial_value must be str or None, not {0}"
1935 .format(type(initial_value).__name__))
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001936 initial_value = str(initial_value)
1937 self.write(initial_value)
1938 self.seek(0)
1939
1940 def getvalue(self):
1941 self.flush()
1942 return self.buffer.getvalue().decode(self._encoding, self._errors)
Benjamin Peterson9fd459a2009-03-09 00:09:44 +00001943
1944 def __repr__(self):
1945        # TextIOWrapper reports its encoding in its repr. In StringIO,
1946        # that's an implementation detail.
1947 return object.__repr__(self)
Benjamin Petersonb487e632009-03-21 03:08:31 +00001948
1949 @property
Benjamin Peterson0926ad12009-06-06 18:02:12 +00001950 def errors(self):
1951 return None
1952
1953 @property
Benjamin Petersonb487e632009-03-21 03:08:31 +00001954 def encoding(self):
1955 return None
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001956
1957 def detach(self):
1958 # This doesn't make sense on StringIO.
1959 self._unsupported("detach")