Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001"""
2Python implementation of the io module.
3"""
4
5import os
6import abc
7import codecs
Benjamin Peterson59406a92009-03-26 17:10:29 +00008import warnings
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00009# Import _thread instead of threading to reduce startup cost
10try:
11 from _thread import allocate_lock as Lock
12except ImportError:
13 from _dummy_thread import allocate_lock as Lock
14
15import io
Benjamin Petersonc3be11a2010-04-27 21:24:03 +000016from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END)
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +000017
18# open() uses st_blksize whenever we can
19DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
20
21# NOTE: Base classes defined here are registered with the "official" ABCs
22# defined in io.py. We don't use real inheritance though, because we don't
23# want to inherit the C implementations.
24
25
26class BlockingIOError(IOError):
27
28 """Exception raised when I/O would block on a non-blocking I/O stream."""
29
30 def __init__(self, errno, strerror, characters_written=0):
31 super().__init__(errno, strerror)
32 if not isinstance(characters_written, int):
            raise TypeError("characters_written must be an integer")
34 self.characters_written = characters_written
35
36
Benjamin Peterson95e392c2010-04-27 21:07:21 +000037def open(file: (str, bytes), mode: str = "r", buffering: int = -1,
Benjamin Peterson9990e8c2009-04-18 14:47:50 +000038 encoding: str = None, errors: str = None,
39 newline: str = None, closefd: bool = True) -> "IOBase":
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +000040
41 r"""Open file and return a stream. Raise IOError upon failure.
42
43 file is either a text or byte string giving the name (and the path
44 if the file isn't in the current working directory) of the file to
45 be opened or an integer file descriptor of the file to be
46 wrapped. (If a file descriptor is given, it is closed when the
47 returned I/O object is closed, unless closefd is set to False.)
48
49 mode is an optional string that specifies the mode in which the file
50 is opened. It defaults to 'r' which means open for reading in text
51 mode. Other common values are 'w' for writing (truncating the file if
52 it already exists), and 'a' for appending (which on some Unix systems,
53 means that all writes append to the end of the file regardless of the
54 current seek position). In text mode, if encoding is not specified the
55 encoding used is platform dependent. (For reading and writing raw
56 bytes use binary mode and leave encoding unspecified.) The available
57 modes are:
58
59 ========= ===============================================================
60 Character Meaning
61 --------- ---------------------------------------------------------------
62 'r' open for reading (default)
63 'w' open for writing, truncating the file first
64 'a' open for writing, appending to the end of the file if it exists
65 'b' binary mode
66 't' text mode (default)
67 '+' open a disk file for updating (reading and writing)
68 'U' universal newline mode (for backwards compatibility; unneeded
69 for new code)
70 ========= ===============================================================
71
72 The default mode is 'rt' (open for reading text). For binary random
73 access, the mode 'w+b' opens and truncates the file to 0 bytes, while
74 'r+b' opens the file without truncation.
75
76 Python distinguishes between files opened in binary and text modes,
77 even when the underlying operating system doesn't. Files opened in
78 binary mode (appending 'b' to the mode argument) return contents as
79 bytes objects without any decoding. In text mode (the default, or when
80 't' is appended to the mode argument), the contents of the file are
81 returned as strings, the bytes having been first decoded using a
82 platform-dependent encoding or using the specified encoding if given.
83
Antoine Pitroud5587bc2009-12-19 21:08:31 +000084 buffering is an optional integer used to set the buffering policy.
85 Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
86 line buffering (only usable in text mode), and an integer > 1 to indicate
87 the size of a fixed-size chunk buffer. When no buffering argument is
88 given, the default buffering policy works as follows:
89
90 * Binary files are buffered in fixed-size chunks; the size of the buffer
91 is chosen using a heuristic trying to determine the underlying device's
92 "block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
93 On many systems, the buffer will typically be 4096 or 8192 bytes long.
94
95 * "Interactive" text files (files for which isatty() returns True)
96 use line buffering. Other text files use the policy described above
97 for binary files.
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +000098
99 encoding is the name of the encoding used to decode or encode the
100 file. This should only be used in text mode. The default encoding is
101 platform dependent, but any encoding supported by Python can be
102 passed. See the codecs module for the list of supported encodings.
103
104 errors is an optional string that specifies how encoding errors are to
105 be handled---this argument should not be used in binary mode. Pass
106 'strict' to raise a ValueError exception if there is an encoding error
107 (the default of None has the same effect), or pass 'ignore' to ignore
108 errors. (Note that ignoring encoding errors can lead to data loss.)
109 See the documentation for codecs.register for a list of the permitted
110 encoding error strings.
111
112 newline controls how universal newlines works (it only applies to text
113 mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
114 follows:
115
116 * On input, if newline is None, universal newlines mode is
117 enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
118 these are translated into '\n' before being returned to the
119 caller. If it is '', universal newline mode is enabled, but line
120 endings are returned to the caller untranslated. If it has any of
121 the other legal values, input lines are only terminated by the given
122 string, and the line ending is returned to the caller untranslated.
123
124 * On output, if newline is None, any '\n' characters written are
125 translated to the system default line separator, os.linesep. If
126 newline is '', no translation takes place. If newline is any of the
127 other legal values, any '\n' characters written are translated to
128 the given string.
129
130 If closefd is False, the underlying file descriptor will be kept open
131 when the file is closed. This does not work when a file name is given
132 and must be True in that case.
133
134 open() returns a file object whose type depends on the mode, and
135 through which the standard file operations such as reading and writing
136 are performed. When open() is used to open a file in a text mode ('w',
137 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
138 a file in a binary mode, the returned class varies: in read binary
139 mode, it returns a BufferedReader; in write binary and append binary
140 modes, it returns a BufferedWriter, and in read/write mode, it returns
141 a BufferedRandom.
142
143 It is also possible to use a string or bytearray as a file for both
144 reading and writing. For strings StringIO can be used like a file
145 opened in a text mode, and for bytes a BytesIO can be used like a file
146 opened in a binary mode.
147 """
148 if not isinstance(file, (str, bytes, int)):
149 raise TypeError("invalid file: %r" % file)
150 if not isinstance(mode, str):
151 raise TypeError("invalid mode: %r" % mode)
Benjamin Peterson95e392c2010-04-27 21:07:21 +0000152 if not isinstance(buffering, int):
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000153 raise TypeError("invalid buffering: %r" % buffering)
154 if encoding is not None and not isinstance(encoding, str):
155 raise TypeError("invalid encoding: %r" % encoding)
156 if errors is not None and not isinstance(errors, str):
157 raise TypeError("invalid errors: %r" % errors)
158 modes = set(mode)
159 if modes - set("arwb+tU") or len(mode) > len(modes):
160 raise ValueError("invalid mode: %r" % mode)
161 reading = "r" in modes
162 writing = "w" in modes
163 appending = "a" in modes
164 updating = "+" in modes
165 text = "t" in modes
166 binary = "b" in modes
167 if "U" in modes:
168 if writing or appending:
169 raise ValueError("can't use U and writing mode at once")
170 reading = True
171 if text and binary:
172 raise ValueError("can't have text and binary mode at once")
173 if reading + writing + appending > 1:
174 raise ValueError("can't have read/write/append mode at once")
175 if not (reading or writing or appending):
176 raise ValueError("must have exactly one of read/write/append mode")
177 if binary and encoding is not None:
178 raise ValueError("binary mode doesn't take an encoding argument")
179 if binary and errors is not None:
180 raise ValueError("binary mode doesn't take an errors argument")
181 if binary and newline is not None:
182 raise ValueError("binary mode doesn't take a newline argument")
183 raw = FileIO(file,
184 (reading and "r" or "") +
185 (writing and "w" or "") +
186 (appending and "a" or "") +
187 (updating and "+" or ""),
188 closefd)
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000189 line_buffering = False
190 if buffering == 1 or buffering < 0 and raw.isatty():
191 buffering = -1
192 line_buffering = True
193 if buffering < 0:
194 buffering = DEFAULT_BUFFER_SIZE
195 try:
196 bs = os.fstat(raw.fileno()).st_blksize
197 except (os.error, AttributeError):
198 pass
199 else:
200 if bs > 1:
201 buffering = bs
202 if buffering < 0:
203 raise ValueError("invalid buffering size")
204 if buffering == 0:
205 if binary:
206 return raw
207 raise ValueError("can't have unbuffered text I/O")
208 if updating:
209 buffer = BufferedRandom(raw, buffering)
210 elif writing or appending:
211 buffer = BufferedWriter(raw, buffering)
212 elif reading:
213 buffer = BufferedReader(raw, buffering)
214 else:
215 raise ValueError("unknown mode: %r" % mode)
216 if binary:
217 return buffer
218 text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
219 text.mode = mode
220 return text
221
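# A minimal usage sketch (illustrative only; the helper below and the temporary
# file it creates are not part of the module). It shows the layering that open()
# sets up: FileIO at the bottom, a buffered wrapper above it, and TextIOWrapper
# on top for text modes.
def _open_usage_sketch():
    import tempfile
    fd, path = tempfile.mkstemp()
    os.close(fd)
    try:
        with open(path, "w", encoding="utf-8") as f:   # text mode -> TextIOWrapper
            f.write("spam\n")                          # '\n' becomes os.linesep on disk
        with open(path, "rb") as f:                    # binary read -> BufferedReader
            data = f.read()
        return data
    finally:
        os.unlink(path)
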
222
223class DocDescriptor:
224 """Helper for builtins.open.__doc__
225 """
226 def __get__(self, obj, typ):
227 return (
Benjamin Petersonc3be11a2010-04-27 21:24:03 +0000228 "open(file, mode='r', buffering=-1, encoding=None, "
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000229 "errors=None, newline=None, closefd=True)\n\n" +
230 open.__doc__)
231
232class OpenWrapper:
233 """Wrapper for builtins.open
234
235 Trick so that open won't become a bound method when stored
236 as a class variable (as dbm.dumb does).
237
238 See initstdio() in Python/pythonrun.c.
239 """
240 __doc__ = DocDescriptor()
241
242 def __new__(cls, *args, **kwargs):
243 return open(*args, **kwargs)
244
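# A small sketch (illustrative only; _OpenWrapperSketch is not part of the module)
# of the problem OpenWrapper solves: a plain function stored as a class attribute
# becomes a bound method on instances, so the instance would be passed as 'file',
# while a class stored the same way is returned unchanged by attribute access.
class _OpenWrapperSketch:
    bound = open           # _OpenWrapperSketch().bound(path) would call open(instance, path)
    unbound = OpenWrapper  # _OpenWrapperSketch().unbound(path) forwards to open(path)
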
245
246class UnsupportedOperation(ValueError, IOError):
247 pass
248
249
250class IOBase(metaclass=abc.ABCMeta):
251
252 """The abstract base class for all I/O classes, acting on streams of
253 bytes. There is no public constructor.
254
255 This class provides dummy implementations for many methods that
256 derived classes can override selectively; the default implementations
257 represent a file that cannot be read, written or seeked.
258
259 Even though IOBase does not declare read, readinto, or write because
260 their signatures will vary, implementations and clients should
261 consider those methods part of the interface. Also, implementations
    may raise an IOError when operations they do not support are called.
263
264 The basic type used for binary data read from or written to a file is
265 bytes. bytearrays are accepted too, and in some cases (such as
266 readinto) needed. Text I/O classes work with str data.
267
268 Note that calling any method (even inquiries) on a closed stream is
269 undefined. Implementations may raise IOError in this case.
270
271 IOBase (and its subclasses) support the iterator protocol, meaning
272 that an IOBase object can be iterated over yielding the lines in a
273 stream.
274
275 IOBase also supports the :keyword:`with` statement. In this example,
276 fp is closed after the suite of the with statement is complete:
277
    with open('spam.txt', 'w') as fp:
279 fp.write('Spam and eggs!')
280 """
281
282 ### Internal ###
283
284 def _unsupported(self, name: str) -> IOError:
285 """Internal: raise an exception for unsupported operations."""
286 raise UnsupportedOperation("%s.%s() not supported" %
287 (self.__class__.__name__, name))
288
289 ### Positioning ###
290
291 def seek(self, pos: int, whence: int = 0) -> int:
292 """Change stream position.
293
294 Change the stream position to byte offset offset. offset is
295 interpreted relative to the position indicated by whence. Values
296 for whence are:
297
298 * 0 -- start of stream (the default); offset should be zero or positive
299 * 1 -- current stream position; offset may be negative
300 * 2 -- end of stream; offset is usually negative
301
302 Return the new absolute position.
303 """
304 self._unsupported("seek")
305
306 def tell(self) -> int:
307 """Return current stream position."""
308 return self.seek(0, 1)
309
310 def truncate(self, pos: int = None) -> int:
311 """Truncate file to size bytes.
312
313 Size defaults to the current IO position as reported by tell(). Return
314 the new size.
315 """
316 self._unsupported("truncate")
317
318 ### Flush and close ###
319
320 def flush(self) -> None:
321 """Flush write buffers, if applicable.
322
323 This is not implemented for read-only and non-blocking streams.
324 """
Antoine Pitrou6be88762010-05-03 16:48:20 +0000325 self._checkClosed()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000326 # XXX Should this return the number of bytes written???
327
328 __closed = False
329
330 def close(self) -> None:
331 """Flush and close the IO object.
332
333 This method has no effect if the file is already closed.
334 """
335 if not self.__closed:
Antoine Pitrou6be88762010-05-03 16:48:20 +0000336 self.flush()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000337 self.__closed = True
338
339 def __del__(self) -> None:
340 """Destructor. Calls close()."""
341 # The try/except block is in case this is called at program
342 # exit time, when it's possible that globals have already been
343 # deleted, and then the close() call might fail. Since
344 # there's nothing we can do about such failures and they annoy
345 # the end users, we suppress the traceback.
346 try:
347 self.close()
348 except:
349 pass
350
351 ### Inquiries ###
352
353 def seekable(self) -> bool:
354 """Return whether object supports random access.
355
356 If False, seek(), tell() and truncate() will raise IOError.
357 This method may need to do a test seek().
358 """
359 return False
360
361 def _checkSeekable(self, msg=None):
362 """Internal: raise an IOError if file is not seekable
363 """
364 if not self.seekable():
365 raise IOError("File or stream is not seekable."
366 if msg is None else msg)
367
368
369 def readable(self) -> bool:
370 """Return whether object was opened for reading.
371
372 If False, read() will raise IOError.
373 """
374 return False
375
376 def _checkReadable(self, msg=None):
377 """Internal: raise an IOError if file is not readable
378 """
379 if not self.readable():
380 raise IOError("File or stream is not readable."
381 if msg is None else msg)
382
383 def writable(self) -> bool:
384 """Return whether object was opened for writing.
385
386 If False, write() and truncate() will raise IOError.
387 """
388 return False
389
390 def _checkWritable(self, msg=None):
391 """Internal: raise an IOError if file is not writable
392 """
393 if not self.writable():
394 raise IOError("File or stream is not writable."
395 if msg is None else msg)
396
397 @property
398 def closed(self):
399 """closed: bool. True iff the file has been closed.
400
401 For backwards compatibility, this is a property, not a predicate.
402 """
403 return self.__closed
404
405 def _checkClosed(self, msg=None):
        """Internal: raise a ValueError if file is closed
407 """
408 if self.closed:
409 raise ValueError("I/O operation on closed file."
410 if msg is None else msg)
411
412 ### Context manager ###
413
414 def __enter__(self) -> "IOBase": # That's a forward reference
415 """Context management protocol. Returns self."""
416 self._checkClosed()
417 return self
418
419 def __exit__(self, *args) -> None:
420 """Context management protocol. Calls close()"""
421 self.close()
422
423 ### Lower-level APIs ###
424
425 # XXX Should these be present even if unimplemented?
426
427 def fileno(self) -> int:
428 """Returns underlying file descriptor if one exists.
429
430 An IOError is raised if the IO object does not use a file descriptor.
431 """
432 self._unsupported("fileno")
433
434 def isatty(self) -> bool:
435 """Return whether this is an 'interactive' stream.
436
437 Return False if it can't be determined.
438 """
439 self._checkClosed()
440 return False
441
442 ### Readline[s] and writelines ###
443
444 def readline(self, limit: int = -1) -> bytes:
445 r"""Read and return a line from the stream.
446
447 If limit is specified, at most limit bytes will be read.
448
449 The line terminator is always b'\n' for binary files; for text
        files, the newline argument to open can be used to select the line
451 terminator(s) recognized.
452 """
453 # For backwards compatibility, a (slowish) readline().
454 if hasattr(self, "peek"):
455 def nreadahead():
456 readahead = self.peek(1)
457 if not readahead:
458 return 1
459 n = (readahead.find(b"\n") + 1) or len(readahead)
460 if limit >= 0:
461 n = min(n, limit)
462 return n
463 else:
464 def nreadahead():
465 return 1
466 if limit is None:
467 limit = -1
Benjamin Petersonb01138a2009-04-24 22:59:52 +0000468 elif not isinstance(limit, int):
469 raise TypeError("limit must be an integer")
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000470 res = bytearray()
471 while limit < 0 or len(res) < limit:
472 b = self.read(nreadahead())
473 if not b:
474 break
475 res += b
476 if res.endswith(b"\n"):
477 break
478 return bytes(res)
479
480 def __iter__(self):
481 self._checkClosed()
482 return self
483
484 def __next__(self):
485 line = self.readline()
486 if not line:
487 raise StopIteration
488 return line
489
490 def readlines(self, hint=None):
491 """Return a list of lines from the stream.
492
493 hint can be specified to control the number of lines read: no more
494 lines will be read if the total size (in bytes/characters) of all
495 lines so far exceeds hint.
496 """
497 if hint is None or hint <= 0:
498 return list(self)
499 n = 0
500 lines = []
501 for line in self:
502 lines.append(line)
503 n += len(line)
504 if n >= hint:
505 break
506 return lines
507
508 def writelines(self, lines):
509 self._checkClosed()
510 for line in lines:
511 self.write(line)
512
513io.IOBase.register(IOBase)
514
515
516class RawIOBase(IOBase):
517
518 """Base class for raw binary I/O."""
519
520 # The read() method is implemented by calling readinto(); derived
521 # classes that want to support read() only need to implement
522 # readinto() as a primitive operation. In general, readinto() can be
523 # more efficient than read().
524
525 # (It would be tempting to also provide an implementation of
526 # readinto() in terms of read(), in case the latter is a more suitable
527 # primitive operation, but that would lead to nasty recursion in case
528 # a subclass doesn't implement either.)
529
530 def read(self, n: int = -1) -> bytes:
531 """Read and return up to n bytes.
532
533 Returns an empty bytes object on EOF, or None if the object is
534 set not to block and has no data to read.
535 """
536 if n is None:
537 n = -1
538 if n < 0:
539 return self.readall()
540 b = bytearray(n.__index__())
541 n = self.readinto(b)
542 del b[n:]
543 return bytes(b)
544
545 def readall(self):
        """Read until EOF, using multiple read() calls."""
547 res = bytearray()
548 while True:
549 data = self.read(DEFAULT_BUFFER_SIZE)
550 if not data:
551 break
552 res += data
553 return bytes(res)
554
555 def readinto(self, b: bytearray) -> int:
556 """Read up to len(b) bytes into b.
557
558 Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
560 """
561 self._unsupported("readinto")
562
563 def write(self, b: bytes) -> int:
564 """Write the given buffer to the IO stream.
565
566 Returns the number of bytes written, which may be less than len(b).
567 """
568 self._unsupported("write")
569
570io.RawIOBase.register(RawIOBase)
571from _io import FileIO
572RawIOBase.register(FileIO)
573
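# A minimal sketch (illustrative only; _RawBytesSketch is not part of the module)
# of a raw stream built on the primitives above: only readinto() and readable()
# are supplied, and read()/readall() are inherited from RawIOBase.
class _RawBytesSketch(RawIOBase):
    """Serve a bytes object through the RawIOBase readinto() primitive."""

    def __init__(self, data):
        self._data = memoryview(bytes(data))
        self._pos = 0

    def readable(self):
        return True

    def readinto(self, b):
        n = min(len(b), len(self._data) - self._pos)
        b[:n] = self._data[self._pos:self._pos + n]
        self._pos += n
        return n

# _RawBytesSketch(b"abc").read()   -> b'abc' (readall() built on readinto())
# _RawBytesSketch(b"abc").read(2)  -> b'ab'
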
574
575class BufferedIOBase(IOBase):
576
577 """Base class for buffered IO objects.
578
579 The main difference with RawIOBase is that the read() method
580 supports omitting the size argument, and does not have a default
581 implementation that defers to readinto().
582
583 In addition, read(), readinto() and write() may raise
584 BlockingIOError if the underlying raw stream is in non-blocking
585 mode and not ready; unlike their raw counterparts, they will never
586 return None.
587
588 A typical implementation should not inherit from a RawIOBase
589 implementation, but wrap one.
590 """
591
592 def read(self, n: int = None) -> bytes:
593 """Read and return up to n bytes.
594
595 If the argument is omitted, None, or negative, reads and
596 returns all data until EOF.
597
598 If the argument is positive, and the underlying raw stream is
599 not 'interactive', multiple raw reads may be issued to satisfy
600 the byte count (unless EOF is reached first). But for
601 interactive raw streams (XXX and for pipes?), at most one raw
602 read will be issued, and a short result does not imply that
603 EOF is imminent.
604
605 Returns an empty bytes array on EOF.
606
607 Raises BlockingIOError if the underlying raw stream has no
608 data at the moment.
609 """
610 self._unsupported("read")
611
612 def read1(self, n: int=None) -> bytes:
613 """Read up to n bytes with at most one read() system call."""
614 self._unsupported("read1")
615
616 def readinto(self, b: bytearray) -> int:
617 """Read up to len(b) bytes into b.
618
619 Like read(), this may issue multiple reads to the underlying raw
620 stream, unless the latter is 'interactive'.
621
622 Returns the number of bytes read (0 for EOF).
623
624 Raises BlockingIOError if the underlying raw stream has no
625 data at the moment.
626 """
627 # XXX This ought to work with anything that supports the buffer API
628 data = self.read(len(b))
629 n = len(data)
630 try:
631 b[:n] = data
632 except TypeError as err:
633 import array
634 if not isinstance(b, array.array):
635 raise err
636 b[:n] = array.array('b', data)
637 return n
638
639 def write(self, b: bytes) -> int:
640 """Write the given buffer to the IO stream.
641
642 Return the number of bytes written, which is never less than
643 len(b).
644
645 Raises BlockingIOError if the buffer is full and the
646 underlying raw stream cannot accept more data at the moment.
647 """
648 self._unsupported("write")
649
Benjamin Petersond2e0c792009-05-01 20:40:59 +0000650 def detach(self) -> None:
651 """
652 Separate the underlying raw stream from the buffer and return it.
653
654 After the raw stream has been detached, the buffer is in an unusable
655 state.
656 """
657 self._unsupported("detach")
658
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000659io.BufferedIOBase.register(BufferedIOBase)
660
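# A short sketch (illustrative only; the helper name is made up) of the default
# readinto() above: it calls read() and copies the result into the caller's
# buffer. BytesIO, defined further below, relies on exactly this inherited
# implementation.
def _readinto_sketch():
    buf = bytearray(4)
    n = BytesIO(b"abcdef").readinto(buf)   # read(4) under the hood
    return n, bytes(buf)                   # (4, b'abcd')
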
661
662class _BufferedIOMixin(BufferedIOBase):
663
664 """A mixin implementation of BufferedIOBase with an underlying raw stream.
665
666 This passes most requests on to the underlying raw stream. It
667 does *not* provide implementations of read(), readinto() or
668 write().
669 """
670
671 def __init__(self, raw):
672 self.raw = raw
673
674 ### Positioning ###
675
676 def seek(self, pos, whence=0):
677 new_position = self.raw.seek(pos, whence)
678 if new_position < 0:
679 raise IOError("seek() returned an invalid position")
680 return new_position
681
682 def tell(self):
683 pos = self.raw.tell()
684 if pos < 0:
685 raise IOError("tell() returned an invalid position")
686 return pos
687
688 def truncate(self, pos=None):
689 # Flush the stream. We're mixing buffered I/O with lower-level I/O,
690 # and a flush may be necessary to synch both views of the current
691 # file state.
692 self.flush()
693
694 if pos is None:
695 pos = self.tell()
696 # XXX: Should seek() be used, instead of passing the position
697 # XXX directly to truncate?
698 return self.raw.truncate(pos)
699
700 ### Flush and close ###
701
702 def flush(self):
Antoine Pitrou6be88762010-05-03 16:48:20 +0000703 if self.closed:
704 raise ValueError("flush of closed file")
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000705 self.raw.flush()
706
707 def close(self):
Antoine Pitrou6be88762010-05-03 16:48:20 +0000708 if self.raw is not None and not self.closed:
709 self.flush()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000710 self.raw.close()
711
Benjamin Petersond2e0c792009-05-01 20:40:59 +0000712 def detach(self):
713 if self.raw is None:
714 raise ValueError("raw stream already detached")
715 self.flush()
716 raw = self.raw
717 self.raw = None
718 return raw
719
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000720 ### Inquiries ###
721
722 def seekable(self):
723 return self.raw.seekable()
724
725 def readable(self):
726 return self.raw.readable()
727
728 def writable(self):
729 return self.raw.writable()
730
731 @property
732 def closed(self):
733 return self.raw.closed
734
735 @property
736 def name(self):
737 return self.raw.name
738
739 @property
740 def mode(self):
741 return self.raw.mode
742
Antoine Pitrou716c4442009-05-23 19:04:03 +0000743 def __repr__(self):
744 clsname = self.__class__.__name__
745 try:
746 name = self.name
747 except AttributeError:
748 return "<_pyio.{0}>".format(clsname)
749 else:
750 return "<_pyio.{0} name={1!r}>".format(clsname, name)
751
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000752 ### Lower-level APIs ###
753
754 def fileno(self):
755 return self.raw.fileno()
756
757 def isatty(self):
758 return self.raw.isatty()
759
760
761class BytesIO(BufferedIOBase):
762
763 """Buffered I/O implementation using an in-memory bytes buffer."""
764
765 def __init__(self, initial_bytes=None):
766 buf = bytearray()
767 if initial_bytes is not None:
768 buf += initial_bytes
769 self._buffer = buf
770 self._pos = 0
771
Alexandre Vassalotticf76e1a2009-07-22 03:24:36 +0000772 def __getstate__(self):
773 if self.closed:
774 raise ValueError("__getstate__ on closed file")
775 return self.__dict__.copy()
776
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000777 def getvalue(self):
778 """Return the bytes value (contents) of the buffer
779 """
780 if self.closed:
781 raise ValueError("getvalue on closed file")
782 return bytes(self._buffer)
783
784 def read(self, n=None):
785 if self.closed:
786 raise ValueError("read from closed file")
787 if n is None:
788 n = -1
789 if n < 0:
790 n = len(self._buffer)
791 if len(self._buffer) <= self._pos:
792 return b""
793 newpos = min(len(self._buffer), self._pos + n)
794 b = self._buffer[self._pos : newpos]
795 self._pos = newpos
796 return bytes(b)
797
798 def read1(self, n):
799 """This is the same as read.
800 """
801 return self.read(n)
802
803 def write(self, b):
804 if self.closed:
805 raise ValueError("write to closed file")
806 if isinstance(b, str):
807 raise TypeError("can't write str to binary stream")
808 n = len(b)
809 if n == 0:
810 return 0
811 pos = self._pos
812 if pos > len(self._buffer):
813 # Inserts null bytes between the current end of the file
814 # and the new write position.
815 padding = b'\x00' * (pos - len(self._buffer))
816 self._buffer += padding
817 self._buffer[pos:pos + n] = b
818 self._pos += n
819 return n
820
821 def seek(self, pos, whence=0):
822 if self.closed:
823 raise ValueError("seek on closed file")
824 try:
Florent Xiclunab14930c2010-03-13 15:26:44 +0000825 pos.__index__
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000826 except AttributeError as err:
827 raise TypeError("an integer is required") from err
828 if whence == 0:
829 if pos < 0:
830 raise ValueError("negative seek position %r" % (pos,))
831 self._pos = pos
832 elif whence == 1:
833 self._pos = max(0, self._pos + pos)
834 elif whence == 2:
835 self._pos = max(0, len(self._buffer) + pos)
836 else:
837 raise ValueError("invalid whence value")
838 return self._pos
839
840 def tell(self):
841 if self.closed:
842 raise ValueError("tell on closed file")
843 return self._pos
844
845 def truncate(self, pos=None):
846 if self.closed:
847 raise ValueError("truncate on closed file")
848 if pos is None:
849 pos = self._pos
Florent Xiclunab14930c2010-03-13 15:26:44 +0000850 else:
851 try:
852 pos.__index__
853 except AttributeError as err:
854 raise TypeError("an integer is required") from err
855 if pos < 0:
856 raise ValueError("negative truncate position %r" % (pos,))
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000857 del self._buffer[pos:]
Antoine Pitrou905a2ff2010-01-31 22:47:27 +0000858 return pos
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000859
860 def readable(self):
861 return True
862
863 def writable(self):
864 return True
865
866 def seekable(self):
867 return True
868
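# A minimal round-trip sketch (illustrative only; the helper name is made up):
# BytesIO keeps the whole stream in a bytearray, so seek(), write() and
# getvalue() never touch the operating system.
def _bytesio_sketch():
    b = BytesIO(b"hello")
    b.seek(0, 2)                     # move to the end of the buffer
    b.write(b" world")               # extends the in-memory buffer
    b.seek(0)
    return b.read(), b.getvalue()    # (b'hello world', b'hello world')
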
869
870class BufferedReader(_BufferedIOMixin):
871
872 """BufferedReader(raw[, buffer_size])
873
    A buffer for a readable, sequential RawIOBase object.
875
876 The constructor creates a BufferedReader for the given readable raw
877 stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
878 is used.
879 """
880
881 def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
882 """Create a new buffered reader using the given readable raw IO object.
883 """
Antoine Pitroucf4c7492009-04-19 00:09:36 +0000884 if not raw.readable():
885 raise IOError('"raw" argument must be readable.')
886
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000887 _BufferedIOMixin.__init__(self, raw)
888 if buffer_size <= 0:
889 raise ValueError("invalid buffer size")
890 self.buffer_size = buffer_size
891 self._reset_read_buf()
892 self._read_lock = Lock()
893
894 def _reset_read_buf(self):
895 self._read_buf = b""
896 self._read_pos = 0
897
898 def read(self, n=None):
899 """Read n bytes.
900
901 Returns exactly n bytes of data unless the underlying raw IO
902 stream reaches EOF or if the call would block in non-blocking
903 mode. If n is negative, read until EOF or until read() would
904 block.
905 """
906 if n is not None and n < -1:
907 raise ValueError("invalid number of bytes to read")
908 with self._read_lock:
909 return self._read_unlocked(n)
910
911 def _read_unlocked(self, n=None):
912 nodata_val = b""
913 empty_values = (b"", None)
914 buf = self._read_buf
915 pos = self._read_pos
916
917 # Special case for when the number of bytes to read is unspecified.
918 if n is None or n == -1:
919 self._reset_read_buf()
920 chunks = [buf[pos:]] # Strip the consumed bytes.
921 current_size = 0
922 while True:
923 # Read until EOF or until read() would block.
924 chunk = self.raw.read()
925 if chunk in empty_values:
926 nodata_val = chunk
927 break
928 current_size += len(chunk)
929 chunks.append(chunk)
930 return b"".join(chunks) or nodata_val
931
932 # The number of bytes to read is specified, return at most n bytes.
933 avail = len(buf) - pos # Length of the available buffered data.
934 if n <= avail:
935 # Fast path: the data to read is fully buffered.
936 self._read_pos += n
937 return buf[pos:pos+n]
938 # Slow path: read from the stream until enough bytes are read,
939 # or until an EOF occurs or until read() would block.
940 chunks = [buf[pos:]]
941 wanted = max(self.buffer_size, n)
942 while avail < n:
943 chunk = self.raw.read(wanted)
944 if chunk in empty_values:
945 nodata_val = chunk
946 break
947 avail += len(chunk)
948 chunks.append(chunk)
        # n is more than avail only when an EOF occurred or when
950 # read() would have blocked.
951 n = min(n, avail)
952 out = b"".join(chunks)
953 self._read_buf = out[n:] # Save the extra data in the buffer.
954 self._read_pos = 0
955 return out[:n] if out else nodata_val
956
957 def peek(self, n=0):
958 """Returns buffered bytes without advancing the position.
959
960 The argument indicates a desired minimal number of bytes; we
961 do at most one raw read to satisfy it. We never return more
962 than self.buffer_size.
963 """
964 with self._read_lock:
965 return self._peek_unlocked(n)
966
967 def _peek_unlocked(self, n=0):
968 want = min(n, self.buffer_size)
969 have = len(self._read_buf) - self._read_pos
970 if have < want or have <= 0:
971 to_read = self.buffer_size - have
972 current = self.raw.read(to_read)
973 if current:
974 self._read_buf = self._read_buf[self._read_pos:] + current
975 self._read_pos = 0
976 return self._read_buf[self._read_pos:]
977
978 def read1(self, n):
979 """Reads up to n bytes, with at most one read() system call."""
980 # Returns up to n bytes. If at least one byte is buffered, we
981 # only return buffered bytes. Otherwise, we do one raw read.
982 if n < 0:
            raise ValueError("number of bytes to read must not be negative")
984 if n == 0:
985 return b""
986 with self._read_lock:
987 self._peek_unlocked(1)
988 return self._read_unlocked(
989 min(n, len(self._read_buf) - self._read_pos))
990
991 def tell(self):
992 return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos
993
994 def seek(self, pos, whence=0):
995 if not (0 <= whence <= 2):
996 raise ValueError("invalid whence value")
997 with self._read_lock:
998 if whence == 1:
999 pos -= len(self._read_buf) - self._read_pos
1000 pos = _BufferedIOMixin.seek(self, pos, whence)
1001 self._reset_read_buf()
1002 return pos
1003
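# A small sketch (illustrative only; the temporary file exists just for the
# example): peek() returns already-buffered bytes without moving the position,
# while read() consumes them.
def _bufferedreader_sketch():
    import tempfile
    fd, path = tempfile.mkstemp()
    try:
        os.write(fd, b"abcdef")
        os.close(fd)
        with BufferedReader(FileIO(path, "r")) as f:
            peeked = f.peek(3)[:3]   # b'abc'; the position does not move
            first = f.read(3)        # b'abc'; the position advances past it
        return peeked, first
    finally:
        os.unlink(path)
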
1004class BufferedWriter(_BufferedIOMixin):
1005
    """A buffer for a writeable, sequential RawIOBase object.
1007
1008 The constructor creates a BufferedWriter for the given writeable raw
1009 stream. If the buffer_size is not given, it defaults to
Benjamin Peterson59406a92009-03-26 17:10:29 +00001010 DEFAULT_BUFFER_SIZE.
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001011 """
1012
Benjamin Peterson59406a92009-03-26 17:10:29 +00001013 _warning_stack_offset = 2
1014
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001015 def __init__(self, raw,
1016 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
Antoine Pitroucf4c7492009-04-19 00:09:36 +00001017 if not raw.writable():
1018 raise IOError('"raw" argument must be writable.')
1019
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001020 _BufferedIOMixin.__init__(self, raw)
1021 if buffer_size <= 0:
1022 raise ValueError("invalid buffer size")
Benjamin Peterson59406a92009-03-26 17:10:29 +00001023 if max_buffer_size is not None:
1024 warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
1025 self._warning_stack_offset)
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001026 self.buffer_size = buffer_size
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001027 self._write_buf = bytearray()
1028 self._write_lock = Lock()
1029
1030 def write(self, b):
1031 if self.closed:
1032 raise ValueError("write to closed file")
1033 if isinstance(b, str):
1034 raise TypeError("can't write str to binary stream")
1035 with self._write_lock:
1036 # XXX we can implement some more tricks to try and avoid
1037 # partial writes
1038 if len(self._write_buf) > self.buffer_size:
1039 # We're full, so let's pre-flush the buffer
1040 try:
1041 self._flush_unlocked()
1042 except BlockingIOError as e:
1043 # We can't accept anything else.
1044 # XXX Why not just let the exception pass through?
1045 raise BlockingIOError(e.errno, e.strerror, 0)
1046 before = len(self._write_buf)
1047 self._write_buf.extend(b)
1048 written = len(self._write_buf) - before
1049 if len(self._write_buf) > self.buffer_size:
1050 try:
1051 self._flush_unlocked()
1052 except BlockingIOError as e:
Benjamin Peterson394ee002009-03-05 22:33:59 +00001053 if len(self._write_buf) > self.buffer_size:
1054 # We've hit the buffer_size. We have to accept a partial
1055 # write and cut back our buffer.
1056 overage = len(self._write_buf) - self.buffer_size
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001057 written -= overage
Benjamin Peterson394ee002009-03-05 22:33:59 +00001058 self._write_buf = self._write_buf[:self.buffer_size]
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001059 raise BlockingIOError(e.errno, e.strerror, written)
1060 return written
1061
1062 def truncate(self, pos=None):
1063 with self._write_lock:
1064 self._flush_unlocked()
1065 if pos is None:
1066 pos = self.raw.tell()
1067 return self.raw.truncate(pos)
1068
1069 def flush(self):
1070 with self._write_lock:
1071 self._flush_unlocked()
1072
1073 def _flush_unlocked(self):
1074 if self.closed:
1075 raise ValueError("flush of closed file")
1076 written = 0
1077 try:
1078 while self._write_buf:
1079 n = self.raw.write(self._write_buf)
1080 if n > len(self._write_buf) or n < 0:
1081 raise IOError("write() returned incorrect number of bytes")
1082 del self._write_buf[:n]
1083 written += n
1084 except BlockingIOError as e:
1085 n = e.characters_written
1086 del self._write_buf[:n]
1087 written += n
1088 raise BlockingIOError(e.errno, e.strerror, written)
1089
1090 def tell(self):
1091 return _BufferedIOMixin.tell(self) + len(self._write_buf)
1092
1093 def seek(self, pos, whence=0):
1094 if not (0 <= whence <= 2):
1095 raise ValueError("invalid whence")
1096 with self._write_lock:
1097 self._flush_unlocked()
1098 return _BufferedIOMixin.seek(self, pos, whence)
1099
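# A short sketch (illustrative only; it only writes to os.devnull): small writes
# accumulate in the internal bytearray and are handed to the raw stream once the
# buffer grows past buffer_size, or on flush()/close().
def _bufferedwriter_sketch():
    with BufferedWriter(FileIO(os.devnull, "w"), buffer_size=16) as f:
        f.write(b"abc")         # held in the buffer
        f.write(b"x" * 32)      # pushes the buffer past 16 bytes -> flushed to raw
        f.flush()               # force out whatever is left
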
1100
1101class BufferedRWPair(BufferedIOBase):
1102
1103 """A buffered reader and writer object together.
1104
1105 A buffered reader object and buffered writer object put together to
1106 form a sequential IO object that can read and write. This is typically
1107 used with a socket or two-way pipe.
1108
1109 reader and writer are RawIOBase objects that are readable and
1110 writeable respectively. If the buffer_size is omitted it defaults to
Benjamin Peterson59406a92009-03-26 17:10:29 +00001111 DEFAULT_BUFFER_SIZE.
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001112 """
1113
1114 # XXX The usefulness of this (compared to having two separate IO
1115 # objects) is questionable.
1116
1117 def __init__(self, reader, writer,
1118 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
1119 """Constructor.
1120
1121 The arguments are two RawIO instances.
1122 """
Benjamin Peterson59406a92009-03-26 17:10:29 +00001123 if max_buffer_size is not None:
1124 warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
Antoine Pitroucf4c7492009-04-19 00:09:36 +00001125
1126 if not reader.readable():
1127 raise IOError('"reader" argument must be readable.')
1128
1129 if not writer.writable():
1130 raise IOError('"writer" argument must be writable.')
1131
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001132 self.reader = BufferedReader(reader, buffer_size)
Benjamin Peterson59406a92009-03-26 17:10:29 +00001133 self.writer = BufferedWriter(writer, buffer_size)
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001134
1135 def read(self, n=None):
1136 if n is None:
1137 n = -1
1138 return self.reader.read(n)
1139
1140 def readinto(self, b):
1141 return self.reader.readinto(b)
1142
1143 def write(self, b):
1144 return self.writer.write(b)
1145
1146 def peek(self, n=0):
1147 return self.reader.peek(n)
1148
1149 def read1(self, n):
1150 return self.reader.read1(n)
1151
1152 def readable(self):
1153 return self.reader.readable()
1154
1155 def writable(self):
1156 return self.writer.writable()
1157
1158 def flush(self):
1159 return self.writer.flush()
1160
1161 def close(self):
1162 self.writer.close()
1163 self.reader.close()
1164
1165 def isatty(self):
1166 return self.reader.isatty() or self.writer.isatty()
1167
1168 @property
1169 def closed(self):
1170 return self.writer.closed
1171
1172
1173class BufferedRandom(BufferedWriter, BufferedReader):
1174
1175 """A buffered interface to random access streams.
1176
1177 The constructor creates a reader and writer for a seekable stream,
1178 raw, given in the first argument. If the buffer_size is omitted it
Benjamin Peterson59406a92009-03-26 17:10:29 +00001179 defaults to DEFAULT_BUFFER_SIZE.
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001180 """
1181
Benjamin Peterson59406a92009-03-26 17:10:29 +00001182 _warning_stack_offset = 3
1183
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001184 def __init__(self, raw,
1185 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
1186 raw._checkSeekable()
1187 BufferedReader.__init__(self, raw, buffer_size)
1188 BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)
1189
1190 def seek(self, pos, whence=0):
1191 if not (0 <= whence <= 2):
1192 raise ValueError("invalid whence")
1193 self.flush()
1194 if self._read_buf:
1195 # Undo read ahead.
1196 with self._read_lock:
1197 self.raw.seek(self._read_pos - len(self._read_buf), 1)
1198 # First do the raw seek, then empty the read buffer, so that
1199 # if the raw seek fails, we don't lose buffered data forever.
1200 pos = self.raw.seek(pos, whence)
1201 with self._read_lock:
1202 self._reset_read_buf()
1203 if pos < 0:
1204 raise IOError("seek() returned invalid position")
1205 return pos
1206
1207 def tell(self):
1208 if self._write_buf:
1209 return BufferedWriter.tell(self)
1210 else:
1211 return BufferedReader.tell(self)
1212
1213 def truncate(self, pos=None):
1214 if pos is None:
1215 pos = self.tell()
1216 # Use seek to flush the read buffer.
Antoine Pitrou905a2ff2010-01-31 22:47:27 +00001217 return BufferedWriter.truncate(self, pos)
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001218
1219 def read(self, n=None):
1220 if n is None:
1221 n = -1
1222 self.flush()
1223 return BufferedReader.read(self, n)
1224
1225 def readinto(self, b):
1226 self.flush()
1227 return BufferedReader.readinto(self, b)
1228
1229 def peek(self, n=0):
1230 self.flush()
1231 return BufferedReader.peek(self, n)
1232
1233 def read1(self, n):
1234 self.flush()
1235 return BufferedReader.read1(self, n)
1236
1237 def write(self, b):
1238 if self._read_buf:
1239 # Undo readahead
1240 with self._read_lock:
1241 self.raw.seek(self._read_pos - len(self._read_buf), 1)
1242 self._reset_read_buf()
1243 return BufferedWriter.write(self, b)
1244
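# A small sketch (illustrative only; the temporary file exists just for the
# example) of interleaving reads and writes through one BufferedRandom object;
# note how read() and write() flush or rewind the opposite buffer as needed.
def _bufferedrandom_sketch():
    import tempfile
    fd, path = tempfile.mkstemp()
    os.close(fd)
    try:
        with BufferedRandom(FileIO(path, "r+")) as f:
            f.write(b"0123456789")
            f.seek(0)
            head = f.read(4)     # b'0123'
            f.write(b"XX")       # overwrites offsets 4 and 5
            f.seek(0)
            data = f.read()      # b'0123XX6789'
        return head, data
    finally:
        os.unlink(path)
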
1245
1246class TextIOBase(IOBase):
1247
1248 """Base class for text I/O.
1249
1250 This class provides a character and line based interface to stream
1251 I/O. There is no readinto method because Python's character strings
1252 are immutable. There is no public constructor.
1253 """
1254
1255 def read(self, n: int = -1) -> str:
1256 """Read at most n characters from stream.
1257
1258 Read from underlying buffer until we have n characters or we hit EOF.
1259 If n is negative or omitted, read until EOF.
1260 """
1261 self._unsupported("read")
1262
1263 def write(self, s: str) -> int:
1264 """Write string s to stream."""
1265 self._unsupported("write")
1266
1267 def truncate(self, pos: int = None) -> int:
1268 """Truncate size to pos."""
1269 self._unsupported("truncate")
1270
1271 def readline(self) -> str:
1272 """Read until newline or EOF.
1273
1274 Returns an empty string if EOF is hit immediately.
1275 """
1276 self._unsupported("readline")
1277
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001278 def detach(self) -> None:
1279 """
1280 Separate the underlying buffer from the TextIOBase and return it.
1281
1282 After the underlying buffer has been detached, the TextIO is in an
1283 unusable state.
1284 """
1285 self._unsupported("detach")
1286
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001287 @property
1288 def encoding(self):
1289 """Subclasses should override."""
1290 return None
1291
1292 @property
1293 def newlines(self):
1294 """Line endings translated so far.
1295
1296 Only line endings translated during reading are considered.
1297
1298 Subclasses should override.
1299 """
1300 return None
1301
Benjamin Peterson0926ad12009-06-06 18:02:12 +00001302 @property
1303 def errors(self):
1304 """Error setting of the decoder or encoder.
1305
1306 Subclasses should override."""
1307 return None
1308
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001309io.TextIOBase.register(TextIOBase)
1310
1311
1312class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
1313 r"""Codec used when reading a file in universal newlines mode. It wraps
1314 another incremental decoder, translating \r\n and \r into \n. It also
1315 records the types of newlines encountered. When used with
1316 translate=False, it ensures that the newline sequence is returned in
1317 one piece.
1318 """
1319 def __init__(self, decoder, translate, errors='strict'):
1320 codecs.IncrementalDecoder.__init__(self, errors=errors)
1321 self.translate = translate
1322 self.decoder = decoder
1323 self.seennl = 0
1324 self.pendingcr = False
1325
1326 def decode(self, input, final=False):
1327 # decode input (with the eventual \r from a previous pass)
1328 if self.decoder is None:
1329 output = input
1330 else:
1331 output = self.decoder.decode(input, final=final)
1332 if self.pendingcr and (output or final):
1333 output = "\r" + output
1334 self.pendingcr = False
1335
1336 # retain last \r even when not translating data:
1337 # then readline() is sure to get \r\n in one pass
1338 if output.endswith("\r") and not final:
1339 output = output[:-1]
1340 self.pendingcr = True
1341
1342 # Record which newlines are read
1343 crlf = output.count('\r\n')
1344 cr = output.count('\r') - crlf
1345 lf = output.count('\n') - crlf
1346 self.seennl |= (lf and self._LF) | (cr and self._CR) \
1347 | (crlf and self._CRLF)
1348
1349 if self.translate:
1350 if crlf:
1351 output = output.replace("\r\n", "\n")
1352 if cr:
1353 output = output.replace("\r", "\n")
1354
1355 return output
1356
1357 def getstate(self):
1358 if self.decoder is None:
1359 buf = b""
1360 flag = 0
1361 else:
1362 buf, flag = self.decoder.getstate()
1363 flag <<= 1
1364 if self.pendingcr:
1365 flag |= 1
1366 return buf, flag
1367
1368 def setstate(self, state):
1369 buf, flag = state
1370 self.pendingcr = bool(flag & 1)
1371 if self.decoder is not None:
1372 self.decoder.setstate((buf, flag >> 1))
1373
1374 def reset(self):
1375 self.seennl = 0
1376 self.pendingcr = False
1377 if self.decoder is not None:
1378 self.decoder.reset()
1379
1380 _LF = 1
1381 _CR = 2
1382 _CRLF = 4
1383
1384 @property
1385 def newlines(self):
1386 return (None,
1387 "\n",
1388 "\r",
1389 ("\r", "\n"),
1390 "\r\n",
1391 ("\n", "\r\n"),
1392 ("\r", "\r\n"),
1393 ("\r", "\n", "\r\n")
1394 )[self.seennl]
1395
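# A short sketch (illustrative only; the helper name is made up): '\r\n' and '\r'
# are folded into '\n' when translate=True, a trailing '\r' is held back until
# the next chunk, and the kinds of line endings seen accumulate in .newlines.
def _newline_decoder_sketch():
    dec = IncrementalNewlineDecoder(codecs.getincrementaldecoder("utf-8")(),
                                    translate=True)
    out = dec.decode(b"one\r\ntwo\r")          # trailing '\r' is retained for now
    out += dec.decode(b"three\n", final=True)
    return out, dec.newlines                   # ('one\ntwo\nthree\n', ('\r', '\n', '\r\n'))
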
1396
1397class TextIOWrapper(TextIOBase):
1398
1399 r"""Character and line based layer over a BufferedIOBase object, buffer.
1400
1401 encoding gives the name of the encoding that the stream will be
1402 decoded or encoded with. It defaults to locale.getpreferredencoding.
1403
1404 errors determines the strictness of encoding and decoding (see the
1405 codecs.register) and defaults to "strict".
1406
1407 newline can be None, '', '\n', '\r', or '\r\n'. It controls the
1408 handling of line endings. If it is None, universal newlines is
    enabled. With this enabled, on input, the line endings '\n', '\r',
1410 or '\r\n' are translated to '\n' before being returned to the
1411 caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any other of its
1413 legal values, that newline becomes the newline when the file is read
1414 and it is returned untranslated. On output, '\n' is converted to the
1415 newline.
1416
1417 If line_buffering is True, a call to flush is implied when a call to
1418 write contains a newline character.
1419 """
1420
1421 _CHUNK_SIZE = 2048
1422
1423 def __init__(self, buffer, encoding=None, errors=None, newline=None,
1424 line_buffering=False):
1425 if newline is not None and not isinstance(newline, str):
1426 raise TypeError("illegal newline type: %r" % (type(newline),))
1427 if newline not in (None, "", "\n", "\r", "\r\n"):
1428 raise ValueError("illegal newline value: %r" % (newline,))
1429 if encoding is None:
1430 try:
1431 encoding = os.device_encoding(buffer.fileno())
1432 except (AttributeError, UnsupportedOperation):
1433 pass
1434 if encoding is None:
1435 try:
1436 import locale
1437 except ImportError:
1438 # Importing locale may fail if Python is being built
1439 encoding = "ascii"
1440 else:
1441 encoding = locale.getpreferredencoding()
1442
1443 if not isinstance(encoding, str):
1444 raise ValueError("invalid encoding: %r" % encoding)
1445
1446 if errors is None:
1447 errors = "strict"
1448 else:
1449 if not isinstance(errors, str):
1450 raise ValueError("invalid errors: %r" % errors)
1451
1452 self.buffer = buffer
1453 self._line_buffering = line_buffering
1454 self._encoding = encoding
1455 self._errors = errors
1456 self._readuniversal = not newline
1457 self._readtranslate = newline is None
1458 self._readnl = newline
1459 self._writetranslate = newline != ''
1460 self._writenl = newline or os.linesep
1461 self._encoder = None
1462 self._decoder = None
1463 self._decoded_chars = '' # buffer for text returned from decoder
1464 self._decoded_chars_used = 0 # offset into _decoded_chars for read()
1465 self._snapshot = None # info for reconstructing decoder state
1466 self._seekable = self._telling = self.buffer.seekable()
1467
Antoine Pitroue4501852009-05-14 18:55:55 +00001468 if self._seekable and self.writable():
1469 position = self.buffer.tell()
1470 if position != 0:
1471 try:
1472 self._get_encoder().setstate(0)
1473 except LookupError:
1474 # Sometimes the encoder doesn't exist
1475 pass
1476
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001477 # self._snapshot is either None, or a tuple (dec_flags, next_input)
1478 # where dec_flags is the second (integer) item of the decoder state
1479 # and next_input is the chunk of input bytes that comes next after the
1480 # snapshot point. We use this to reconstruct decoder states in tell().
1481
1482 # Naming convention:
1483 # - "bytes_..." for integer variables that count input bytes
1484 # - "chars_..." for integer variables that count decoded characters
1485
Benjamin Petersonc4c0eae2009-03-09 00:07:03 +00001486 def __repr__(self):
Antoine Pitrou716c4442009-05-23 19:04:03 +00001487 try:
1488 name = self.name
1489 except AttributeError:
1490 return "<_pyio.TextIOWrapper encoding={0!r}>".format(self.encoding)
1491 else:
1492 return "<_pyio.TextIOWrapper name={0!r} encoding={1!r}>".format(
1493 name, self.encoding)
Benjamin Petersonc4c0eae2009-03-09 00:07:03 +00001494
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001495 @property
1496 def encoding(self):
1497 return self._encoding
1498
1499 @property
1500 def errors(self):
1501 return self._errors
1502
1503 @property
1504 def line_buffering(self):
1505 return self._line_buffering
1506
1507 def seekable(self):
1508 return self._seekable
1509
1510 def readable(self):
1511 return self.buffer.readable()
1512
1513 def writable(self):
1514 return self.buffer.writable()
1515
1516 def flush(self):
1517 self.buffer.flush()
1518 self._telling = self._seekable
1519
1520 def close(self):
Antoine Pitrou6be88762010-05-03 16:48:20 +00001521 if self.buffer is not None and not self.closed:
1522 self.flush()
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001523 self.buffer.close()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001524
1525 @property
1526 def closed(self):
1527 return self.buffer.closed
1528
1529 @property
1530 def name(self):
1531 return self.buffer.name
1532
1533 def fileno(self):
1534 return self.buffer.fileno()
1535
1536 def isatty(self):
1537 return self.buffer.isatty()
1538
1539 def write(self, s: str):
1540 if self.closed:
1541 raise ValueError("write to closed file")
1542 if not isinstance(s, str):
1543 raise TypeError("can't write %s to text stream" %
1544 s.__class__.__name__)
1545 length = len(s)
1546 haslf = (self._writetranslate or self._line_buffering) and "\n" in s
1547 if haslf and self._writetranslate and self._writenl != "\n":
1548 s = s.replace("\n", self._writenl)
1549 encoder = self._encoder or self._get_encoder()
1550 # XXX What if we were just reading?
1551 b = encoder.encode(s)
1552 self.buffer.write(b)
1553 if self._line_buffering and (haslf or "\r" in s):
1554 self.flush()
1555 self._snapshot = None
1556 if self._decoder:
1557 self._decoder.reset()
1558 return length
1559
1560 def _get_encoder(self):
1561 make_encoder = codecs.getincrementalencoder(self._encoding)
1562 self._encoder = make_encoder(self._errors)
1563 return self._encoder
1564
1565 def _get_decoder(self):
1566 make_decoder = codecs.getincrementaldecoder(self._encoding)
1567 decoder = make_decoder(self._errors)
1568 if self._readuniversal:
1569 decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
1570 self._decoder = decoder
1571 return decoder
1572
1573 # The following three methods implement an ADT for _decoded_chars.
1574 # Text returned from the decoder is buffered here until the client
1575 # requests it by calling our read() or readline() method.
1576 def _set_decoded_chars(self, chars):
1577 """Set the _decoded_chars buffer."""
1578 self._decoded_chars = chars
1579 self._decoded_chars_used = 0
1580
1581 def _get_decoded_chars(self, n=None):
1582 """Advance into the _decoded_chars buffer."""
1583 offset = self._decoded_chars_used
1584 if n is None:
1585 chars = self._decoded_chars[offset:]
1586 else:
1587 chars = self._decoded_chars[offset:offset + n]
1588 self._decoded_chars_used += len(chars)
1589 return chars
1590
1591 def _rewind_decoded_chars(self, n):
1592 """Rewind the _decoded_chars buffer."""
1593 if self._decoded_chars_used < n:
1594 raise AssertionError("rewind decoded_chars out of bounds")
1595 self._decoded_chars_used -= n
1596
1597 def _read_chunk(self):
1598 """
1599 Read and decode the next chunk of data from the BufferedReader.
1600 """
1601
1602 # The return value is True unless EOF was reached. The decoded
1603 # string is placed in self._decoded_chars (replacing its previous
1604 # value). The entire input chunk is sent to the decoder, though
1605 # some of it may remain buffered in the decoder, yet to be
1606 # converted.
1607
1608 if self._decoder is None:
1609 raise ValueError("no decoder")
1610
1611 if self._telling:
1612 # To prepare for tell(), we need to snapshot a point in the
1613 # file where the decoder's input buffer is empty.
1614
1615 dec_buffer, dec_flags = self._decoder.getstate()
1616 # Given this, we know there was a valid snapshot point
1617 # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).
1618
1619 # Read a chunk, decode it, and put the result in self._decoded_chars.
1620 input_chunk = self.buffer.read1(self._CHUNK_SIZE)
1621 eof = not input_chunk
1622 self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
1623
1624 if self._telling:
1625 # At the snapshot point, len(dec_buffer) bytes before the read,
1626 # the next input to be decoded is dec_buffer + input_chunk.
1627 self._snapshot = (dec_flags, dec_buffer + input_chunk)
1628
1629 return not eof
1630
1631 def _pack_cookie(self, position, dec_flags=0,
1632 bytes_to_feed=0, need_eof=0, chars_to_skip=0):
1633 # The meaning of a tell() cookie is: seek to position, set the
1634 # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
1635 # into the decoder with need_eof as the EOF flag, then skip
1636 # chars_to_skip characters of the decoded result. For most simple
1637 # decoders, tell() will often just give a byte offset in the file.
1638 return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
1639 (chars_to_skip<<192) | bool(need_eof)<<256)
1640
1641 def _unpack_cookie(self, bigint):
1642 rest, position = divmod(bigint, 1<<64)
1643 rest, dec_flags = divmod(rest, 1<<64)
1644 rest, bytes_to_feed = divmod(rest, 1<<64)
1645 need_eof, chars_to_skip = divmod(rest, 1<<64)
1646 return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
1647
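    # A worked example of the packing above (made-up numbers, for illustration):
    #   _pack_cookie(position=3, dec_flags=1, bytes_to_feed=2, need_eof=0,
    #                chars_to_skip=1)
    #     == 3 | (1 << 64) | (2 << 128) | (1 << 192)
    # and _unpack_cookie() recovers the fields with successive
    # divmod(..., 1 << 64) steps, giving back (3, 1, 2, 0, 1).
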
1648 def tell(self):
1649 if not self._seekable:
1650 raise IOError("underlying stream is not seekable")
1651 if not self._telling:
1652 raise IOError("telling position disabled by next() call")
1653 self.flush()
1654 position = self.buffer.tell()
1655 decoder = self._decoder
1656 if decoder is None or self._snapshot is None:
1657 if self._decoded_chars:
1658 # This should never happen.
1659 raise AssertionError("pending decoded text")
1660 return position
1661
1662 # Skip backward to the snapshot point (see _read_chunk).
1663 dec_flags, next_input = self._snapshot
1664 position -= len(next_input)
1665
1666 # How many decoded characters have been used up since the snapshot?
1667 chars_to_skip = self._decoded_chars_used
1668 if chars_to_skip == 0:
1669 # We haven't moved from the snapshot point.
1670 return self._pack_cookie(position, dec_flags)
1671
1672 # Starting from the snapshot position, we will walk the decoder
1673 # forward until it gives us enough decoded characters.
1674 saved_state = decoder.getstate()
1675 try:
1676 # Note our initial start point.
1677 decoder.setstate((b'', dec_flags))
1678 start_pos = position
1679 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1680 need_eof = 0
1681
1682 # Feed the decoder one byte at a time. As we go, note the
1683 # nearest "safe start point" before the current location
1684 # (a point where the decoder has nothing buffered, so seek()
1685 # can safely start from there and advance to this location).
1686 next_byte = bytearray(1)
1687 for next_byte[0] in next_input:
1688 bytes_fed += 1
1689 chars_decoded += len(decoder.decode(next_byte))
1690 dec_buffer, dec_flags = decoder.getstate()
1691 if not dec_buffer and chars_decoded <= chars_to_skip:
1692 # Decoder buffer is empty, so this is a safe start point.
1693 start_pos += bytes_fed
1694 chars_to_skip -= chars_decoded
1695 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1696 if chars_decoded >= chars_to_skip:
1697 break
1698 else:
1699 # We didn't get enough decoded data; signal EOF to get more.
1700 chars_decoded += len(decoder.decode(b'', final=True))
1701 need_eof = 1
1702 if chars_decoded < chars_to_skip:
1703 raise IOError("can't reconstruct logical file position")
1704
1705 # The returned cookie corresponds to the last safe start point.
1706 return self._pack_cookie(
1707 start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
1708 finally:
1709 decoder.setstate(saved_state)
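# Rough usage sketch (illustrative only, not part of the original module),
# showing why the cookie is opaque rather than a plain byte offset:
#
#     buf = io.BytesIO("héllo\nwörld\n".encode("utf-8"))
#     txt = TextIOWrapper(buf, encoding="utf-8")
#     first = txt.readline()         # 'héllo\n'
#     cookie = txt.tell()            # packs position plus decoder state
#     second = txt.readline()        # 'wörld\n'
#     txt.seek(cookie)               # back to just after the first line
#     assert txt.readline() == second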
1710
1711 def truncate(self, pos=None):
1712 self.flush()
1713 if pos is None:
1714 pos = self.tell()
1715 return self.buffer.truncate(pos)
1716
1717 def detach(self):
1718 if self.buffer is None:
1719 raise ValueError("buffer is already detached")
1720 self.flush()
1721 buffer = self.buffer
1722 self.buffer = None
1723 return buffer
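# Minimal sketch of detach() (not part of the original module):
#
#     txt = TextIOWrapper(io.BytesIO(b"data"), encoding="ascii")
#     raw = txt.detach()             # hands back the underlying buffer
#     raw.read()                     # b'data'; txt is unusable afterwards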
1724
1725 def seek(self, cookie, whence=0):
1726 if self.closed:
1727 raise ValueError("seek on closed file")
1728 if not self._seekable:
1729 raise IOError("underlying stream is not seekable")
1730 if whence == 1: # seek relative to current position
1731 if cookie != 0:
1732 raise IOError("can't do nonzero cur-relative seeks")
1733 # Seeking to the current position should attempt to
1734 # sync the underlying buffer with the current position.
1735 whence = 0
1736 cookie = self.tell()
1737 if whence == 2: # seek relative to end of file
1738 if cookie != 0:
1739 raise IOError("can't do nonzero end-relative seeks")
1740 self.flush()
1741 position = self.buffer.seek(0, 2)
1742 self._set_decoded_chars('')
1743 self._snapshot = None
1744 if self._decoder:
1745 self._decoder.reset()
1746 return position
1747 if whence != 0:
1748 raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
1749 (whence,))
1750 if cookie < 0:
1751 raise ValueError("negative seek position %r" % (cookie,))
1752 self.flush()
1753
1754 # The strategy of seek() is to go back to the safe start point
1755 # and replay the effect of read(chars_to_skip) from there.
1756 start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
1757 self._unpack_cookie(cookie)
1758
1759 # Seek back to the safe start point.
1760 self.buffer.seek(start_pos)
1761 self._set_decoded_chars('')
1762 self._snapshot = None
1763
1764 # Restore the decoder to its state from the safe start point.
1765 if cookie == 0 and self._decoder:
1766 self._decoder.reset()
1767 elif self._decoder or dec_flags or chars_to_skip:
1768 self._decoder = self._decoder or self._get_decoder()
1769 self._decoder.setstate((b'', dec_flags))
1770 self._snapshot = (dec_flags, b'')
1771
1772 if chars_to_skip:
1773 # Just like _read_chunk, feed the decoder and save a snapshot.
1774 input_chunk = self.buffer.read(bytes_to_feed)
1775 self._set_decoded_chars(
1776 self._decoder.decode(input_chunk, need_eof))
1777 self._snapshot = (dec_flags, input_chunk)
1778
1779 # Skip chars_to_skip of the decoded characters.
1780 if len(self._decoded_chars) < chars_to_skip:
1781 raise IOError("can't restore logical file position")
1782 self._decoded_chars_used = chars_to_skip
1783
1784 # Finally, reset the encoder (only needed for proper BOM handling)
1785 try:
1786 encoder = self._encoder or self._get_encoder()
1787 except LookupError:
1788 # Sometimes the codec lacks an incremental encoder
1789 pass
1790 else:
1791 if cookie != 0:
1792 encoder.setstate(0)
1793 else:
1794 encoder.reset()
1795 return cookie
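# Illustrative calls (not part of the original module) showing the whence
# restrictions enforced above:
#
#     txt.seek(0, SEEK_END)     # allowed: absolute seek to the end
#     txt.seek(0, SEEK_CUR)     # allowed: re-sync at the current position
#     txt.seek(5, SEEK_CUR)     # IOError: nonzero cur-relative seeks unsupported
#     txt.seek(cookie)          # allowed: cookie previously returned by tell()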
1796
1797 def read(self, n=None):
1798 self._checkReadable()
1799 if n is None:
1800 n = -1
1801 decoder = self._decoder or self._get_decoder()
1802 try:
1803 n.__index__
1804 except AttributeError as err:
1805 raise TypeError("an integer is required") from err
1806 if n < 0:
1807 # Read everything.
1808 result = (self._get_decoded_chars() +
1809 decoder.decode(self.buffer.read(), final=True))
1810 self._set_decoded_chars('')
1811 self._snapshot = None
1812 return result
1813 else:
1814 # Keep reading chunks until we have n characters to return.
1815 eof = False
1816 result = self._get_decoded_chars(n)
1817 while len(result) < n and not eof:
1818 eof = not self._read_chunk()
1819 result += self._get_decoded_chars(n - len(result))
1820 return result
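# Short sketch (not part of the original module):
#
#     txt = TextIOWrapper(io.BytesIO(b"abcdef"), encoding="ascii")
#     txt.read(2)     # 'ab'
#     txt.read()      # 'cdef' -- omitting n (or passing a negative) reads to EOF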
1821
1822 def __next__(self):
1823 self._telling = False
1824 line = self.readline()
1825 if not line:
1826 self._snapshot = None
1827 self._telling = self._seekable
1828 raise StopIteration
1829 return line
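# Note (illustrative, not part of the original module): while a file is being
# iterated, the snapshot bookkeeping in _read_chunk is skipped, so calling
# tell() mid-loop raises IOError; it becomes usable again once iteration
# reaches EOF:
#
#     for line in txt:
#         txt.tell()      # IOError: telling position disabled by next() call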
1830
1831 def readline(self, limit=None):
1832 if self.closed:
1833 raise ValueError("read from closed file")
1834 if limit is None:
1835 limit = -1
1836 elif not isinstance(limit, int):
1837 raise TypeError("limit must be an integer")
1838
1839 # Grab all the decoded text (we will rewind any extra bits later).
1840 line = self._get_decoded_chars()
1841
1842 start = 0
1843 # Make the decoder if it doesn't already exist.
1844 if not self._decoder:
1845 self._get_decoder()
1846
1847 pos = endpos = None
1848 while True:
1849 if self._readtranslate:
1850 # Newlines are already translated; only search for \n
1851 pos = line.find('\n', start)
1852 if pos >= 0:
1853 endpos = pos + 1
1854 break
1855 else:
1856 start = len(line)
1857
1858 elif self._readuniversal:
1859 # Universal newline search. Find any of \r, \r\n, \n
1860 # The decoder ensures that \r\n are not split in two pieces
1861
1862 # In C we'd look for these in parallel of course.
1863 nlpos = line.find("\n", start)
1864 crpos = line.find("\r", start)
1865 if crpos == -1:
1866 if nlpos == -1:
1867 # Nothing found
1868 start = len(line)
1869 else:
1870 # Found \n
1871 endpos = nlpos + 1
1872 break
1873 elif nlpos == -1:
1874 # Found lone \r
1875 endpos = crpos + 1
1876 break
1877 elif nlpos < crpos:
1878 # Found \n
1879 endpos = nlpos + 1
1880 break
1881 elif nlpos == crpos + 1:
1882 # Found \r\n
1883 endpos = crpos + 2
1884 break
1885 else:
1886 # Found \r
1887 endpos = crpos + 1
1888 break
1889 else:
1890 # non-universal
1891 pos = line.find(self._readnl)
1892 if pos >= 0:
1893 endpos = pos + len(self._readnl)
1894 break
1895
1896 if limit >= 0 and len(line) >= limit:
1897 endpos = limit # reached length limit
1898 break
1899
1900 # No line ending seen yet - get more data
1901 while self._read_chunk():
1902 if self._decoded_chars:
1903 break
1904 if self._decoded_chars:
1905 line += self._get_decoded_chars()
1906 else:
1907 # end of file
1908 self._set_decoded_chars('')
1909 self._snapshot = None
1910 return line
1911
1912 if limit >= 0 and endpos > limit:
1913 endpos = limit # don't exceed limit
1914
1915 # Rewind _decoded_chars to just after the line ending we found.
1916 self._rewind_decoded_chars(len(line) - endpos)
1917 return line[:endpos]
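# Behaviour sketch for the branches above (illustrative, not part of the
# original module); newline='' selects the universal search without
# translating the endings:
#
#     txt = TextIOWrapper(io.BytesIO(b"one\r\ntwo\rthree\n"),
#                         encoding="ascii", newline="")
#     txt.readline()      # 'one\r\n'
#     txt.readline()      # 'two\r'
#     txt.readline(3)     # 'thr' -- the limit caps the returned line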
1918
1919 @property
1920 def newlines(self):
1921 return self._decoder.newlines if self._decoder else None
1922
1923
1924class StringIO(TextIOWrapper):
1925 """Text I/O implementation using an in-memory buffer.
1926
1927 The initial_value argument sets the value of the object. The newline
1928 argument works like the one in TextIOWrapper's constructor.
1929 """
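# Rough usage sketch (not part of the original class):
#
#     s = StringIO("hello\nworld\n")
#     s.readline()      # 'hello\n'
#     s.getvalue()      # 'hello\nworld\n' -- whole buffer, regardless of position
#     s.read()          # 'world\n' -- continues from the current position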
1930
1931 def __init__(self, initial_value="", newline="\n"):
1932 super(StringIO, self).__init__(BytesIO(),
1933 encoding="utf-8",
1934 errors="strict",
1935 newline=newline)
1936 # Issue #5645: make universal newlines semantics the same as in the
1937 # C version, even under Windows.
1938 if newline is None:
1939 self._writetranslate = False
1940 if initial_value is not None:
1941 if not isinstance(initial_value, str):
1942 raise TypeError("initial_value must be str or None, not {0}"
1943 .format(type(initial_value).__name__))
1944 initial_value = str(initial_value)
1945 self.write(initial_value)
1946 self.seek(0)
1947
1948 def getvalue(self):
1949 self.flush()
1950 return self.buffer.getvalue().decode(self._encoding, self._errors)
1951
1952 def __repr__(self):
1953 # TextIOWrapper includes the encoding in its repr. In StringIO,
1954 # that's an implementation detail.
1955 return object.__repr__(self)
1956
1957 @property
1958 def errors(self):
1959 return None
1960
1961 @property
1962 def encoding(self):
1963 return None
1964
1965 def detach(self):
1966 # This doesn't make sense on StringIO.
1967 self._unsupported("detach")