"""
Python implementation of the io module.
"""

import os
import abc
import codecs
import warnings
# Import _thread instead of threading to reduce startup cost
try:
    from _thread import allocate_lock as Lock
except ImportError:
    from _dummy_thread import allocate_lock as Lock

import io
from io import __all__
from io import SEEK_SET, SEEK_CUR, SEEK_END

# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024  # bytes

# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.


class BlockingIOError(IOError):

    """Exception raised when I/O would block on a non-blocking I/O stream."""

    def __init__(self, errno, strerror, characters_written=0):
        super().__init__(errno, strerror)
        if not isinstance(characters_written, int):
            raise TypeError("characters_written must be an integer")
        self.characters_written = characters_written


def open(file, mode="r", buffering=None,
         encoding=None, errors=None,
         newline=None, closefd=True) -> "IOBase":

    r"""Open file and return a stream.  Raise IOError upon failure.

    file is either a text or byte string giving the name (and the path
    if the file isn't in the current working directory) of the file to
    be opened or an integer file descriptor of the file to be
    wrapped. (If a file descriptor is given, it is closed when the
    returned I/O object is closed, unless closefd is set to False.)

    mode is an optional string that specifies the mode in which the file
    is opened. It defaults to 'r' which means open for reading in text
    mode.  Other common values are 'w' for writing (truncating the file if
    it already exists), and 'a' for appending (which on some Unix systems,
    means that all writes append to the end of the file regardless of the
    current seek position). In text mode, if encoding is not specified the
    encoding used is platform dependent. (For reading and writing raw
    bytes use binary mode and leave encoding unspecified.) The available
    modes are:

    ========= ===============================================================
    Character Meaning
    --------- ---------------------------------------------------------------
    'r'       open for reading (default)
    'w'       open for writing, truncating the file first
    'a'       open for writing, appending to the end of the file if it exists
    'b'       binary mode
    't'       text mode (default)
    '+'       open a disk file for updating (reading and writing)
    'U'       universal newline mode (for backwards compatibility; unneeded
              for new code)
    ========= ===============================================================

    The default mode is 'rt' (open for reading text). For binary random
    access, the mode 'w+b' opens and truncates the file to 0 bytes, while
    'r+b' opens the file without truncation.

    Python distinguishes between files opened in binary and text modes,
    even when the underlying operating system doesn't. Files opened in
    binary mode (appending 'b' to the mode argument) return contents as
    bytes objects without any decoding. In text mode (the default, or when
    't' is appended to the mode argument), the contents of the file are
    returned as strings, the bytes having been first decoded using a
    platform-dependent encoding or using the specified encoding if given.

    buffering is an optional integer used to set the buffering policy.
    Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
    line buffering (only usable in text mode), and an integer > 1 to indicate
    the size of a fixed-size chunk buffer.  When no buffering argument is
    given, the default buffering policy works as follows:

    * Binary files are buffered in fixed-size chunks; the size of the buffer
      is chosen using a heuristic trying to determine the underlying device's
      "block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
      On many systems, the buffer will typically be 4096 or 8192 bytes long.

    * "Interactive" text files (files for which isatty() returns True)
      use line buffering.  Other text files use the policy described above
      for binary files.

    encoding is the name of the encoding used to decode or encode the
    file. This should only be used in text mode. The default encoding is
    platform dependent, but any encoding supported by Python can be
    passed.  See the codecs module for the list of supported encodings.

    errors is an optional string that specifies how encoding errors are to
    be handled---this argument should not be used in binary mode. Pass
    'strict' to raise a ValueError exception if there is an encoding error
    (the default of None has the same effect), or pass 'ignore' to ignore
    errors. (Note that ignoring encoding errors can lead to data loss.)
    See the documentation for codecs.register for a list of the permitted
    encoding error strings.

    newline controls how universal newlines works (it only applies to text
    mode). It can be None, '', '\n', '\r', and '\r\n'.  It works as
    follows:

    * On input, if newline is None, universal newlines mode is
      enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
      these are translated into '\n' before being returned to the
      caller. If it is '', universal newline mode is enabled, but line
      endings are returned to the caller untranslated. If it has any of
      the other legal values, input lines are only terminated by the given
      string, and the line ending is returned to the caller untranslated.

    * On output, if newline is None, any '\n' characters written are
      translated to the system default line separator, os.linesep. If
      newline is '', no translation takes place. If newline is any of the
      other legal values, any '\n' characters written are translated to
      the given string.

    If closefd is False, the underlying file descriptor will be kept open
    when the file is closed. This does not work when a file name is given
    and must be True in that case.

    open() returns a file object whose type depends on the mode, and
    through which the standard file operations such as reading and writing
    are performed. When open() is used to open a file in a text mode ('w',
    'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
    a file in a binary mode, the returned class varies: in read binary
    mode, it returns a BufferedReader; in write binary and append binary
    modes, it returns a BufferedWriter, and in read/write mode, it returns
    a BufferedRandom.

    It is also possible to use a string or bytearray as a file for both
    reading and writing. For strings StringIO can be used like a file
    opened in a text mode, and for bytes a BytesIO can be used like a file
    opened in a binary mode.
    """
    if not isinstance(file, (str, bytes, int)):
        raise TypeError("invalid file: %r" % file)
    if not isinstance(mode, str):
        raise TypeError("invalid mode: %r" % mode)
    if buffering is not None and not isinstance(buffering, int):
        raise TypeError("invalid buffering: %r" % buffering)
    if encoding is not None and not isinstance(encoding, str):
        raise TypeError("invalid encoding: %r" % encoding)
    if errors is not None and not isinstance(errors, str):
        raise TypeError("invalid errors: %r" % errors)
    modes = set(mode)
    if modes - set("arwb+tU") or len(mode) > len(modes):
        raise ValueError("invalid mode: %r" % mode)
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    updating = "+" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        if writing or appending:
            raise ValueError("can't use U and writing mode at once")
        reading = True
    if text and binary:
        raise ValueError("can't have text and binary mode at once")
    if reading + writing + appending > 1:
        raise ValueError("can't have read/write/append mode at once")
    if not (reading or writing or appending):
        raise ValueError("must have exactly one of read/write/append mode")
    if binary and encoding is not None:
        raise ValueError("binary mode doesn't take an encoding argument")
    if binary and errors is not None:
        raise ValueError("binary mode doesn't take an errors argument")
    if binary and newline is not None:
        raise ValueError("binary mode doesn't take a newline argument")
    raw = FileIO(file,
                 (reading and "r" or "") +
                 (writing and "w" or "") +
                 (appending and "a" or "") +
                 (updating and "+" or ""),
                 closefd)
    if buffering is None:
        buffering = -1
    line_buffering = False
    if buffering == 1 or buffering < 0 and raw.isatty():
        buffering = -1
        line_buffering = True
    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        try:
            bs = os.fstat(raw.fileno()).st_blksize
        except (os.error, AttributeError):
            pass
        else:
            if bs > 1:
                buffering = bs
    if buffering < 0:
        raise ValueError("invalid buffering size")
    if buffering == 0:
        if binary:
            return raw
        raise ValueError("can't have unbuffered text I/O")
    if updating:
        buffer = BufferedRandom(raw, buffering)
    elif writing or appending:
        buffer = BufferedWriter(raw, buffering)
    elif reading:
        buffer = BufferedReader(raw, buffering)
    else:
        raise ValueError("unknown mode: %r" % mode)
    if binary:
        return buffer
    text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
    text.mode = mode
    return text
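
# A quick illustration (comments only, not executed; the file names are
# hypothetical) of which wrapper type the dispatch above returns:
#
#     open("data.bin", "rb")                # BufferedReader
#     open("data.bin", "rb", buffering=0)   # the raw FileIO, unbuffered
#     open("data.bin", "wb")                # BufferedWriter
#     open("data.bin", "r+b")               # BufferedRandom
#     f = open("data.txt", "w")             # TextIOWrapper; f.mode == "w"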


class DocDescriptor:
    """Helper for builtins.open.__doc__
    """
    def __get__(self, obj, typ):
        return (
            "open(file, mode='r', buffering=None, encoding=None, "
            "errors=None, newline=None, closefd=True)\n\n" +
            open.__doc__)

class OpenWrapper:
    """Wrapper for builtins.open

    Trick so that open won't become a bound method when stored
    as a class variable (as dbm.dumb does).

    See initstdio() in Python/pythonrun.c.
    """
    __doc__ = DocDescriptor()

    def __new__(cls, *args, **kwargs):
        return open(*args, **kwargs)

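# Sketch of the class-variable pattern OpenWrapper exists for (the class and
# attribute names below are illustrative, not taken from dbm.dumb itself):
#
#     class _Database:
#         _open = OpenWrapper          # stored as a class variable
#
#         def _read_file(self, path):
#             with self._open(path, "rb") as f:
#                 return f.read()
#
# With a plain function stored the same way, self._open(...) would bind the
# instance and pass it as the first argument; OpenWrapper forwards to open()
# through __new__ instead.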

class UnsupportedOperation(ValueError, IOError):
    pass


class IOBase(metaclass=abc.ABCMeta):

    """The abstract base class for all I/O classes, acting on streams of
    bytes. There is no public constructor.

    This class provides dummy implementations for many methods that
    derived classes can override selectively; the default implementations
    represent a file that cannot be read, written or seeked.

    Even though IOBase does not declare read, readinto, or write because
    their signatures will vary, implementations and clients should
    consider those methods part of the interface. Also, implementations
    may raise an IOError when operations they do not support are called.

    The basic type used for binary data read from or written to a file is
    bytes. bytearrays are accepted too, and in some cases (such as
    readinto) needed. Text I/O classes work with str data.

    Note that calling any method (even inquiries) on a closed stream is
    undefined. Implementations may raise IOError in this case.

    IOBase (and its subclasses) support the iterator protocol, meaning
    that an IOBase object can be iterated over yielding the lines in a
    stream.

    IOBase also supports the :keyword:`with` statement. In this example,
    fp is closed after the suite of the with statement is complete:

    with open('spam.txt', 'w') as fp:
        fp.write('Spam and eggs!')
283 """
284
285 ### Internal ###
286
Raymond Hettingerd2b03e12011-01-12 23:52:40 +0000287 def _unsupported(self, name):
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +0000288 """Internal: raise an exception for unsupported operations."""
289 raise UnsupportedOperation("%s.%s() not supported" %
290 (self.__class__.__name__, name))
291
292 ### Positioning ###
293
    def seek(self, pos, whence=0):
        """Change stream position.

        Change the stream position to the given byte offset.  The offset is
        interpreted relative to the position indicated by whence.  Values
        for whence are:

        * 0 -- start of stream (the default); offset should be zero or positive
        * 1 -- current stream position; offset may be negative
        * 2 -- end of stream; offset is usually negative

        Return the new absolute position.
        """
        self._unsupported("seek")

    def tell(self):
        """Return current stream position."""
        return self.seek(0, 1)

    def truncate(self, pos=None):
        """Truncate file to size bytes.

        Size defaults to the current IO position as reported by tell(). Return
        the new size.
        """
        self._unsupported("truncate")

    ### Flush and close ###

    def flush(self):
        """Flush write buffers, if applicable.

        This is not implemented for read-only and non-blocking streams.
        """
        self._checkClosed()
        # XXX Should this return the number of bytes written???

    __closed = False

    def close(self):
        """Flush and close the IO object.

        This method has no effect if the file is already closed.
        """
        if not self.__closed:
            self.flush()
            self.__closed = True

    def __del__(self):
        """Destructor.  Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail.  Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass

    ### Inquiries ###

    def seekable(self):
        """Return whether object supports random access.

        If False, seek(), tell() and truncate() will raise IOError.
        This method may need to do a test seek().
        """
        return False

    def _checkSeekable(self, msg=None):
        """Internal: raise an IOError if file is not seekable
        """
        if not self.seekable():
            raise IOError("File or stream is not seekable."
                          if msg is None else msg)

    def readable(self):
        """Return whether object was opened for reading.

        If False, read() will raise IOError.
        """
        return False

    def _checkReadable(self, msg=None):
        """Internal: raise an IOError if file is not readable
        """
        if not self.readable():
            raise IOError("File or stream is not readable."
                          if msg is None else msg)

    def writable(self):
        """Return whether object was opened for writing.

        If False, write() and truncate() will raise IOError.
        """
        return False

    def _checkWritable(self, msg=None):
        """Internal: raise an IOError if file is not writable
        """
        if not self.writable():
            raise IOError("File or stream is not writable."
                          if msg is None else msg)

    @property
    def closed(self):
        """closed: bool.  True iff the file has been closed.

        For backwards compatibility, this is a property, not a predicate.
        """
        return self.__closed

    def _checkClosed(self, msg=None):
        """Internal: raise a ValueError if file is closed
        """
        if self.closed:
            raise ValueError("I/O operation on closed file."
                             if msg is None else msg)

    ### Context manager ###

    def __enter__(self):  # That's a forward reference
        """Context management protocol.  Returns self."""
        self._checkClosed()
        return self

    def __exit__(self, *args):
        """Context management protocol.  Calls close()"""
        self.close()

    ### Lower-level APIs ###

    # XXX Should these be present even if unimplemented?

    def fileno(self):
        """Returns underlying file descriptor if one exists.

        An IOError is raised if the IO object does not use a file descriptor.
        """
        self._unsupported("fileno")

    def isatty(self):
        """Return whether this is an 'interactive' stream.

        Return False if it can't be determined.
        """
        self._checkClosed()
        return False

    ### Readline[s] and writelines ###

    def readline(self, limit=-1):
        r"""Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.

        The line terminator is always b'\n' for binary files; for text
        files, the newline argument to open can be used to select the line
        terminator(s) recognized.
        """
        # For backwards compatibility, a (slowish) readline().
        if hasattr(self, "peek"):
            def nreadahead():
                readahead = self.peek(1)
                if not readahead:
                    return 1
                n = (readahead.find(b"\n") + 1) or len(readahead)
                if limit >= 0:
                    n = min(n, limit)
                return n
        else:
            def nreadahead():
                return 1
        if limit is None:
            limit = -1
        elif not isinstance(limit, int):
            raise TypeError("limit must be an integer")
        res = bytearray()
        while limit < 0 or len(res) < limit:
            b = self.read(nreadahead())
            if not b:
                break
            res += b
            if res.endswith(b"\n"):
                break
        return bytes(res)

    def __iter__(self):
        self._checkClosed()
        return self

    def __next__(self):
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def readlines(self, hint=None):
        """Return a list of lines from the stream.

        hint can be specified to control the number of lines read: no more
        lines will be read if the total size (in bytes/characters) of all
        lines so far exceeds hint.
        """
        if hint is None or hint <= 0:
            return list(self)
        n = 0
        lines = []
        for line in self:
            lines.append(line)
            n += len(line)
            if n >= hint:
                break
        return lines

    def writelines(self, lines):
        self._checkClosed()
        for line in lines:
            self.write(line)

io.IOBase.register(IOBase)
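
# Illustration of the iteration protocol and readlines() hint described above
# (comments only; "example.txt" is a hypothetical file):
#
#     with open("example.txt") as f:       # IOBase.__exit__ closes f
#         for line in f:                   # __iter__/__next__ call readline()
#             print(line, end="")
#
#     with open("example.txt") as f:
#         head = f.readlines(65536)        # stops once ~64 KiB of lines is read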


class RawIOBase(IOBase):

    """Base class for raw binary I/O."""

    # The read() method is implemented by calling readinto(); derived
    # classes that want to support read() only need to implement
    # readinto() as a primitive operation.  In general, readinto() can be
    # more efficient than read().

    # (It would be tempting to also provide an implementation of
    # readinto() in terms of read(), in case the latter is a more suitable
    # primitive operation, but that would lead to nasty recursion in case
    # a subclass doesn't implement either.)

    def read(self, n=-1):
        """Read and return up to n bytes.

        Returns an empty bytes object on EOF, or None if the object is
        set not to block and has no data to read.
        """
        if n is None:
            n = -1
        if n < 0:
            return self.readall()
        b = bytearray(n.__index__())
        n = self.readinto(b)
        if n is None:
            return None
        del b[n:]
        return bytes(b)

    def readall(self):
552 """Read until EOF, using multiple read() call."""
        res = bytearray()
        while True:
            data = self.read(DEFAULT_BUFFER_SIZE)
            if not data:
                break
            res += data
        if res:
            return bytes(res)
        else:
            # b'' or None
            return data

    def readinto(self, b):
        """Read up to len(b) bytes into b.

        Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
        """
        self._unsupported("readinto")

    def write(self, b):
        """Write the given buffer to the IO stream.

        Returns the number of bytes written, which may be less than len(b).
        """
        self._unsupported("write")

io.RawIOBase.register(RawIOBase)
from _io import FileIO
RawIOBase.register(FileIO)
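
# Minimal sketch of a RawIOBase subclass: as the comments above explain, only
# readinto() needs to be supplied and read()/readall() come for free.  ZeroRaw
# is an illustrative name, not part of this module (comments only):
#
#     class ZeroRaw(RawIOBase):
#         """Raw stream yielding a fixed number of NUL bytes."""
#         def __init__(self, size):
#             self._left = size
#         def readable(self):
#             return True
#         def readinto(self, b):
#             n = min(len(b), self._left)
#             b[:n] = b"\x00" * n
#             self._left -= n
#             return n
#
#     ZeroRaw(3).read()    # -> b'\x00\x00\x00', via readall() and readinto()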


class BufferedIOBase(IOBase):

    """Base class for buffered IO objects.

    The main difference with RawIOBase is that the read() method
    supports omitting the size argument, and does not have a default
    implementation that defers to readinto().

    In addition, read(), readinto() and write() may raise
    BlockingIOError if the underlying raw stream is in non-blocking
    mode and not ready; unlike their raw counterparts, they will never
    return None.

    A typical implementation should not inherit from a RawIOBase
    implementation, but wrap one.
    """

    def read(self, n=None):
        """Read and return up to n bytes.

        If the argument is omitted, None, or negative, reads and
        returns all data until EOF.

        If the argument is positive, and the underlying raw stream is
        not 'interactive', multiple raw reads may be issued to satisfy
        the byte count (unless EOF is reached first).  But for
        interactive raw streams (XXX and for pipes?), at most one raw
        read will be issued, and a short result does not imply that
        EOF is imminent.

        Returns an empty bytes array on EOF.

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        self._unsupported("read")

    def read1(self, n=None):
        """Read up to n bytes with at most one read() system call."""
        self._unsupported("read1")

    def readinto(self, b):
        """Read up to len(b) bytes into b.

        Like read(), this may issue multiple reads to the underlying raw
        stream, unless the latter is 'interactive'.

        Returns the number of bytes read (0 for EOF).

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        # XXX This ought to work with anything that supports the buffer API
        data = self.read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            import array
            if not isinstance(b, array.array):
                raise err
            b[:n] = array.array('b', data)
        return n
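
    # Usage sketch for the default readinto() above (comments only; "bio"
    # stands for any readable BufferedIOBase instance and is not defined here):
    #
    #     buf = bytearray(16)
    #     n = bio.readinto(buf)        # fills buf in place
    #     data = bytes(buf[:n])        # only the first n bytes are valid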

    def write(self, b):
        """Write the given buffer to the IO stream.

        Return the number of bytes written, which is never less than
        len(b).

        Raises BlockingIOError if the buffer is full and the
        underlying raw stream cannot accept more data at the moment.
        """
        self._unsupported("write")

    def detach(self):
        """
        Separate the underlying raw stream from the buffer and return it.

        After the raw stream has been detached, the buffer is in an unusable
        state.
        """
        self._unsupported("detach")

io.BufferedIOBase.register(BufferedIOBase)


class _BufferedIOMixin(BufferedIOBase):

    """A mixin implementation of BufferedIOBase with an underlying raw stream.

    This passes most requests on to the underlying raw stream.  It
    does *not* provide implementations of read(), readinto() or
    write().
    """

    def __init__(self, raw):
        self._raw = raw

    ### Positioning ###

    def seek(self, pos, whence=0):
        new_position = self.raw.seek(pos, whence)
        if new_position < 0:
            raise IOError("seek() returned an invalid position")
        return new_position

    def tell(self):
        pos = self.raw.tell()
        if pos < 0:
            raise IOError("tell() returned an invalid position")
        return pos

    def truncate(self, pos=None):
        # Flush the stream.  We're mixing buffered I/O with lower-level I/O,
        # and a flush may be necessary to synch both views of the current
        # file state.
        self.flush()

        if pos is None:
            pos = self.tell()
        # XXX: Should seek() be used, instead of passing the position
        # XXX  directly to truncate?
        return self.raw.truncate(pos)

    ### Flush and close ###

    def flush(self):
        if self.closed:
            raise ValueError("flush of closed file")
        self.raw.flush()

    def close(self):
        if self.raw is not None and not self.closed:
            self.flush()
            self.raw.close()

    def detach(self):
        if self.raw is None:
            raise ValueError("raw stream already detached")
        self.flush()
        raw = self._raw
        self._raw = None
        return raw

    ### Inquiries ###

    def seekable(self):
        return self.raw.seekable()

    def readable(self):
        return self.raw.readable()

    def writable(self):
        return self.raw.writable()

    @property
    def raw(self):
        return self._raw

    @property
    def closed(self):
        return self.raw.closed

    @property
    def name(self):
        return self.raw.name

    @property
    def mode(self):
        return self.raw.mode

    def __repr__(self):
        clsname = self.__class__.__name__
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.{0}>".format(clsname)
        else:
            return "<_pyio.{0} name={1!r}>".format(clsname, name)

    ### Lower-level APIs ###

    def fileno(self):
        return self.raw.fileno()

    def isatty(self):
        return self.raw.isatty()


class BytesIO(BufferedIOBase):

    """Buffered I/O implementation using an in-memory bytes buffer."""

    def __init__(self, initial_bytes=None):
        buf = bytearray()
        if initial_bytes is not None:
            buf += initial_bytes
        self._buffer = buf
        self._pos = 0

    def getvalue(self):
        """Return the bytes value (contents) of the buffer
        """
        if self.closed:
            raise ValueError("getvalue on closed file")
        return bytes(self._buffer)

    def read(self, n=None):
        if self.closed:
            raise ValueError("read from closed file")
        if n is None:
            n = -1
        if n < 0:
            n = len(self._buffer)
        if len(self._buffer) <= self._pos:
            return b""
        newpos = min(len(self._buffer), self._pos + n)
        b = self._buffer[self._pos : newpos]
        self._pos = newpos
        return bytes(b)

    def read1(self, n):
        """This is the same as read.
        """
        return self.read(n)

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        n = len(b)
        if n == 0:
            return 0
        pos = self._pos
        if pos > len(self._buffer):
            # Inserts null bytes between the current end of the file
            # and the new write position.
            padding = b'\x00' * (pos - len(self._buffer))
            self._buffer += padding
        self._buffer[pos:pos + n] = b
        self._pos += n
        return n

    def seek(self, pos, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        try:
            pos = pos.__index__()
        except AttributeError as err:
            raise TypeError("an integer is required") from err
        if whence == 0:
            if pos < 0:
                raise ValueError("negative seek position %r" % (pos,))
            self._pos = pos
        elif whence == 1:
            self._pos = max(0, self._pos + pos)
        elif whence == 2:
            self._pos = max(0, len(self._buffer) + pos)
        else:
            raise ValueError("invalid whence value")
        return self._pos

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file")
        return self._pos

    def truncate(self, pos=None):
        if self.closed:
            raise ValueError("truncate on closed file")
        if pos is None:
            pos = self._pos
        elif pos < 0:
            raise ValueError("negative truncate position %r" % (pos,))
        del self._buffer[pos:]
        return pos

    def readable(self):
        return True

    def writable(self):
        return True

    def seekable(self):
        return True

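# A short BytesIO walk-through (comments only), showing the zero-fill
# behaviour of write() past the current end implemented above:
#
#     b = BytesIO(b"abc")
#     b.seek(5)            # seeking past the end is allowed
#     b.write(b"xyz")      # the gap is padded with b"\x00"
#     b.getvalue()         # -> b'abc\x00\x00xyz'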

class BufferedReader(_BufferedIOMixin):

    """BufferedReader(raw[, buffer_size])

    A buffer for a readable, sequential RawIOBase object.

    The constructor creates a BufferedReader for the given readable raw
    stream and buffer_size.  If buffer_size is omitted, DEFAULT_BUFFER_SIZE
    is used.
    """

    def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
        """Create a new buffered reader using the given readable raw IO object.
        """
        if not raw.readable():
            raise IOError('"raw" argument must be readable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        self.buffer_size = buffer_size
        self._reset_read_buf()
        self._read_lock = Lock()

    def _reset_read_buf(self):
        self._read_buf = b""
        self._read_pos = 0

    def read(self, n=None):
        """Read n bytes.

        Returns exactly n bytes of data unless the underlying raw IO
        stream reaches EOF or if the call would block in non-blocking
        mode. If n is negative, read until EOF or until read() would
        block.
        """
        if n is not None and n < -1:
            raise ValueError("invalid number of bytes to read")
        with self._read_lock:
            return self._read_unlocked(n)

    def _read_unlocked(self, n=None):
        nodata_val = b""
        empty_values = (b"", None)
        buf = self._read_buf
        pos = self._read_pos

        # Special case for when the number of bytes to read is unspecified.
        if n is None or n == -1:
            self._reset_read_buf()
            chunks = [buf[pos:]]  # Strip the consumed bytes.
            current_size = 0
            while True:
                # Read until EOF or until read() would block.
                chunk = self.raw.read()
                if chunk in empty_values:
                    nodata_val = chunk
                    break
                current_size += len(chunk)
                chunks.append(chunk)
            return b"".join(chunks) or nodata_val

        # The number of bytes to read is specified, return at most n bytes.
        avail = len(buf) - pos  # Length of the available buffered data.
        if n <= avail:
            # Fast path: the data to read is fully buffered.
            self._read_pos += n
            return buf[pos:pos+n]
        # Slow path: read from the stream until enough bytes are read,
        # or until an EOF occurs or until read() would block.
        chunks = [buf[pos:]]
        wanted = max(self.buffer_size, n)
        while avail < n:
            chunk = self.raw.read(wanted)
            if chunk in empty_values:
                nodata_val = chunk
                break
            avail += len(chunk)
            chunks.append(chunk)
        # n is more than avail only when an EOF occurred or when
        # read() would have blocked.
        n = min(n, avail)
        out = b"".join(chunks)
        self._read_buf = out[n:]  # Save the extra data in the buffer.
        self._read_pos = 0
        return out[:n] if out else nodata_val

    def peek(self, n=0):
        """Returns buffered bytes without advancing the position.

        The argument indicates a desired minimal number of bytes; we
        do at most one raw read to satisfy it.  We never return more
        than self.buffer_size.
        """
        with self._read_lock:
            return self._peek_unlocked(n)

    def _peek_unlocked(self, n=0):
        want = min(n, self.buffer_size)
        have = len(self._read_buf) - self._read_pos
        if have < want or have <= 0:
            to_read = self.buffer_size - have
            current = self.raw.read(to_read)
            if current:
                self._read_buf = self._read_buf[self._read_pos:] + current
                self._read_pos = 0
        return self._read_buf[self._read_pos:]

    def read1(self, n):
        """Reads up to n bytes, with at most one read() system call."""
        # Returns up to n bytes.  If at least one byte is buffered, we
        # only return buffered bytes.  Otherwise, we do one raw read.
        if n < 0:
            raise ValueError("number of bytes to read must be positive")
        if n == 0:
            return b""
        with self._read_lock:
            self._peek_unlocked(1)
            return self._read_unlocked(
                min(n, len(self._read_buf) - self._read_pos))

    def tell(self):
        return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence value")
        with self._read_lock:
            if whence == 1:
                pos -= len(self._read_buf) - self._read_pos
            pos = _BufferedIOMixin.seek(self, pos, whence)
            self._reset_read_buf()
            return pos
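
# Behaviour sketch for peek()/read1() (comments only; wrapping a BytesIO here
# purely for illustration):
#
#     r = BufferedReader(BytesIO(b"abcdef"), buffer_size=4)
#     r.peek(1)      # -> b'abcd'; one raw read fills the buffer, nothing consumed
#     r.read1(100)   # -> b'abcd'; at most one raw read, so only buffered bytes
#     r.read()       # -> b'ef'; reads the remainder up to EOF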

class BufferedWriter(_BufferedIOMixin):

    """A buffer for a writeable sequential RawIO object.

    The constructor creates a BufferedWriter for the given writeable raw
    stream.  If the buffer_size is not given, it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 2

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        if not raw.writable():
            raise IOError('"raw" argument must be writable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                          self._warning_stack_offset)
        self.buffer_size = buffer_size
        self._write_buf = bytearray()
        self._write_lock = Lock()

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, str):
            raise TypeError("can't write str to binary stream")
        with self._write_lock:
            # XXX we can implement some more tricks to try and avoid
            # partial writes
            if len(self._write_buf) > self.buffer_size:
                # We're full, so let's pre-flush the buffer
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    # We can't accept anything else.
                    # XXX Why not just let the exception pass through?
                    raise BlockingIOError(e.errno, e.strerror, 0)
            before = len(self._write_buf)
            self._write_buf.extend(b)
            written = len(self._write_buf) - before
            if len(self._write_buf) > self.buffer_size:
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    if len(self._write_buf) > self.buffer_size:
                        # We've hit the buffer_size. We have to accept a partial
                        # write and cut back our buffer.
                        overage = len(self._write_buf) - self.buffer_size
                        written -= overage
                        self._write_buf = self._write_buf[:self.buffer_size]
                    raise BlockingIOError(e.errno, e.strerror, written)
            return written

    def truncate(self, pos=None):
        with self._write_lock:
            self._flush_unlocked()
            if pos is None:
                pos = self.raw.tell()
            return self.raw.truncate(pos)

    def flush(self):
        with self._write_lock:
            self._flush_unlocked()

    def _flush_unlocked(self):
        if self.closed:
            raise ValueError("flush of closed file")
        written = 0
        try:
            while self._write_buf:
                n = self.raw.write(self._write_buf)
                if n > len(self._write_buf) or n < 0:
                    raise IOError("write() returned incorrect number of bytes")
                del self._write_buf[:n]
                written += n
        except BlockingIOError as e:
            n = e.characters_written
            del self._write_buf[:n]
            written += n
            raise BlockingIOError(e.errno, e.strerror, written)

    def tell(self):
        return _BufferedIOMixin.tell(self) + len(self._write_buf)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        with self._write_lock:
            self._flush_unlocked()
            return _BufferedIOMixin.seek(self, pos, whence)
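
# Small usage sketch (comments only; "out.bin" is a hypothetical path): writes
# smaller than buffer_size are held in _write_buf until flush()/close():
#
#     w = BufferedWriter(FileIO("out.bin", "w"))
#     w.write(b"header")   # buffered, nothing hits the raw stream yet
#     w.flush()            # forces the buffered bytes out via raw.write()
#     w.close()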


class BufferedRWPair(BufferedIOBase):

    """A buffered reader and writer object together.

    A buffered reader object and buffered writer object put together to
    form a sequential IO object that can read and write. This is typically
    used with a socket or two-way pipe.

    reader and writer are RawIOBase objects that are readable and
    writeable respectively. If the buffer_size is omitted it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    # XXX The usefulness of this (compared to having two separate IO
    # objects) is questionable.

    def __init__(self, reader, writer,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        """Constructor.

        The arguments are two RawIO instances.
        """
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)

        if not reader.readable():
            raise IOError('"reader" argument must be readable.')

        if not writer.writable():
            raise IOError('"writer" argument must be writable.')

        self.reader = BufferedReader(reader, buffer_size)
        self.writer = BufferedWriter(writer, buffer_size)

    def read(self, n=None):
        if n is None:
            n = -1
        return self.reader.read(n)

    def readinto(self, b):
        return self.reader.readinto(b)

    def write(self, b):
        return self.writer.write(b)

    def peek(self, n=0):
        return self.reader.peek(n)

    def read1(self, n):
        return self.reader.read1(n)

    def readable(self):
        return self.reader.readable()

    def writable(self):
        return self.writer.writable()

    def flush(self):
        return self.writer.flush()

    def close(self):
        self.writer.close()
        self.reader.close()

    def isatty(self):
        return self.reader.isatty() or self.writer.isatty()

    @property
    def closed(self):
        return self.writer.closed

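# Illustrative pairing over a POSIX pipe (comments only; assumes os.pipe() is
# available on the platform):
#
#     import os
#     r_fd, w_fd = os.pipe()
#     pair = BufferedRWPair(FileIO(r_fd, "rb"), FileIO(w_fd, "wb"))
#     pair.write(b"ping")
#     pair.flush()
#     pair.read(4)     # -> b'ping'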

class BufferedRandom(BufferedWriter, BufferedReader):

    """A buffered interface to random access streams.

    The constructor creates a reader and writer for a seekable stream,
    raw, given in the first argument. If the buffer_size is omitted it
    defaults to DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 3

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        raw._checkSeekable()
        BufferedReader.__init__(self, raw, buffer_size)
        BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        self.flush()
        if self._read_buf:
            # Undo read ahead.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
        # First do the raw seek, then empty the read buffer, so that
        # if the raw seek fails, we don't lose buffered data forever.
        pos = self.raw.seek(pos, whence)
        with self._read_lock:
            self._reset_read_buf()
        if pos < 0:
            raise IOError("seek() returned invalid position")
        return pos

    def tell(self):
        if self._write_buf:
            return BufferedWriter.tell(self)
        else:
            return BufferedReader.tell(self)

    def truncate(self, pos=None):
        if pos is None:
            pos = self.tell()
        # Use seek to flush the read buffer.
        return BufferedWriter.truncate(self, pos)

    def read(self, n=None):
        if n is None:
            n = -1
        self.flush()
        return BufferedReader.read(self, n)

    def readinto(self, b):
        self.flush()
        return BufferedReader.readinto(self, b)

    def peek(self, n=0):
        self.flush()
        return BufferedReader.peek(self, n)

    def read1(self, n):
        self.flush()
        return BufferedReader.read1(self, n)

    def write(self, b):
        if self._read_buf:
            # Undo readahead
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
                self._reset_read_buf()
        return BufferedWriter.write(self, b)


class TextIOBase(IOBase):

    """Base class for text I/O.

    This class provides a character and line based interface to stream
    I/O. There is no readinto method because Python's character strings
    are immutable. There is no public constructor.
    """

    def read(self, n=-1):
        """Read at most n characters from stream.

        Read from underlying buffer until we have n characters or we hit EOF.
        If n is negative or omitted, read until EOF.
        """
        self._unsupported("read")

    def write(self, s):
        """Write string s to stream."""
        self._unsupported("write")

    def truncate(self, pos=None):
        """Truncate size to pos."""
        self._unsupported("truncate")

    def readline(self):
        """Read until newline or EOF.

        Returns an empty string if EOF is hit immediately.
        """
        self._unsupported("readline")

    def detach(self):
        """
        Separate the underlying buffer from the TextIOBase and return it.

        After the underlying buffer has been detached, the TextIO is in an
        unusable state.
        """
        self._unsupported("detach")

    @property
    def encoding(self):
        """Subclasses should override."""
        return None

    @property
    def newlines(self):
        """Line endings translated so far.

        Only line endings translated during reading are considered.

        Subclasses should override.
        """
        return None

    @property
    def errors(self):
        """Error setting of the decoder or encoder.

        Subclasses should override."""
        return None

io.TextIOBase.register(TextIOBase)


class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
    r"""Codec used when reading a file in universal newlines mode.  It wraps
    another incremental decoder, translating \r\n and \r into \n.  It also
    records the types of newlines encountered.  When used with
    translate=False, it ensures that the newline sequence is returned in
    one piece.
    """
    def __init__(self, decoder, translate, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors=errors)
        self.translate = translate
        self.decoder = decoder
        self.seennl = 0
        self.pendingcr = False

    def decode(self, input, final=False):
        # decode input (with the eventual \r from a previous pass)
        if self.decoder is None:
            output = input
        else:
            output = self.decoder.decode(input, final=final)
        if self.pendingcr and (output or final):
            output = "\r" + output
            self.pendingcr = False

        # retain last \r even when not translating data:
        # then readline() is sure to get \r\n in one pass
        if output.endswith("\r") and not final:
            output = output[:-1]
            self.pendingcr = True

        # Record which newlines are read
        crlf = output.count('\r\n')
        cr = output.count('\r') - crlf
        lf = output.count('\n') - crlf
        self.seennl |= (lf and self._LF) | (cr and self._CR) \
                    | (crlf and self._CRLF)

        if self.translate:
            if crlf:
                output = output.replace("\r\n", "\n")
            if cr:
                output = output.replace("\r", "\n")

        return output

    def getstate(self):
        if self.decoder is None:
            buf = b""
            flag = 0
        else:
            buf, flag = self.decoder.getstate()
        flag <<= 1
        if self.pendingcr:
            flag |= 1
        return buf, flag

    def setstate(self, state):
        buf, flag = state
        self.pendingcr = bool(flag & 1)
        if self.decoder is not None:
            self.decoder.setstate((buf, flag >> 1))

    def reset(self):
        self.seennl = 0
        self.pendingcr = False
        if self.decoder is not None:
            self.decoder.reset()

    _LF = 1
    _CR = 2
    _CRLF = 4

    @property
    def newlines(self):
        return (None,
                "\n",
                "\r",
                ("\r", "\n"),
                "\r\n",
                ("\n", "\r\n"),
                ("\r", "\r\n"),
                ("\r", "\n", "\r\n")
               )[self.seennl]

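# Worked example (comments only) of the decoder above with translate=True and
# no wrapped decoder; note how a '\r' pending at a chunk boundary is held back:
#
#     dec = IncrementalNewlineDecoder(decoder=None, translate=True)
#     dec.decode("one\r")          # -> 'one'      ('\r' kept pending)
#     dec.decode("\ntwo\r")        # -> '\ntwo'    (the '\r\n' emerged as '\n')
#     dec.decode("", final=True)   # -> '\n'       (lone '\r' flushed as '\n')
#     dec.newlines                 # -> ('\r', '\r\n')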

class TextIOWrapper(TextIOBase):

    r"""Character and line based layer over a BufferedIOBase object, buffer.

    encoding gives the name of the encoding that the stream will be
    decoded or encoded with. It defaults to locale.getpreferredencoding.

    errors determines the strictness of encoding and decoding (see the
    codecs.register) and defaults to "strict".

    newline can be None, '', '\n', '\r', or '\r\n'. It controls the
    handling of line endings. If it is None, universal newlines is
    enabled.  With this enabled, on input, the line endings '\n', '\r',
    or '\r\n' are translated to '\n' before being returned to the
    caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any of the other
    legal values, that newline becomes the newline when the file is read
    and it is returned untranslated. On output, '\n' is converted to the
    newline.
1420
1421 If line_buffering is True, a call to flush is implied when a call to
1422 write contains a newline character.
1423 """
1424
1425 _CHUNK_SIZE = 2048
1426
1427 def __init__(self, buffer, encoding=None, errors=None, newline=None,
1428 line_buffering=False):
1429 if newline is not None and not isinstance(newline, str):
1430 raise TypeError("illegal newline type: %r" % (type(newline),))
1431 if newline not in (None, "", "\n", "\r", "\r\n"):
1432 raise ValueError("illegal newline value: %r" % (newline,))
1433 if encoding is None:
1434 try:
1435 encoding = os.device_encoding(buffer.fileno())
1436 except (AttributeError, UnsupportedOperation):
1437 pass
1438 if encoding is None:
1439 try:
1440 import locale
1441 except ImportError:
1442 # Importing locale may fail if Python is being built
1443 encoding = "ascii"
1444 else:
1445 encoding = locale.getpreferredencoding()
1446
1447 if not isinstance(encoding, str):
1448 raise ValueError("invalid encoding: %r" % encoding)
1449
1450 if errors is None:
1451 errors = "strict"
1452 else:
1453 if not isinstance(errors, str):
1454 raise ValueError("invalid errors: %r" % errors)
1455
Antoine Pitrou6cfc5122010-12-21 21:26:09 +00001456 self._buffer = buffer
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001457 self._line_buffering = line_buffering
1458 self._encoding = encoding
1459 self._errors = errors
1460 self._readuniversal = not newline
1461 self._readtranslate = newline is None
1462 self._readnl = newline
1463 self._writetranslate = newline != ''
1464 self._writenl = newline or os.linesep
1465 self._encoder = None
1466 self._decoder = None
1467 self._decoded_chars = '' # buffer for text returned from decoder
1468 self._decoded_chars_used = 0 # offset into _decoded_chars for read()
1469 self._snapshot = None # info for reconstructing decoder state
1470 self._seekable = self._telling = self.buffer.seekable()
1471
Antoine Pitroue4501852009-05-14 18:55:55 +00001472 if self._seekable and self.writable():
1473 position = self.buffer.tell()
1474 if position != 0:
1475 try:
1476 self._get_encoder().setstate(0)
1477 except LookupError:
1478 # Sometimes the encoder doesn't exist
1479 pass
1480
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001481 # self._snapshot is either None, or a tuple (dec_flags, next_input)
1482 # where dec_flags is the second (integer) item of the decoder state
1483 # and next_input is the chunk of input bytes that comes next after the
1484 # snapshot point. We use this to reconstruct decoder states in tell().
1485
1486 # Naming convention:
1487 # - "bytes_..." for integer variables that count input bytes
1488 # - "chars_..." for integer variables that count decoded characters
1489
Benjamin Petersonc4c0eae2009-03-09 00:07:03 +00001490 def __repr__(self):
Antoine Pitrou716c4442009-05-23 19:04:03 +00001491 try:
1492 name = self.name
1493 except AttributeError:
1494 return "<_pyio.TextIOWrapper encoding={0!r}>".format(self.encoding)
1495 else:
1496 return "<_pyio.TextIOWrapper name={0!r} encoding={1!r}>".format(
1497 name, self.encoding)
Benjamin Petersonc4c0eae2009-03-09 00:07:03 +00001498
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001499 @property
1500 def encoding(self):
1501 return self._encoding
1502
1503 @property
1504 def errors(self):
1505 return self._errors
1506
1507 @property
1508 def line_buffering(self):
1509 return self._line_buffering
1510
Antoine Pitrou6cfc5122010-12-21 21:26:09 +00001511 @property
1512 def buffer(self):
1513 return self._buffer
1514
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001515 def seekable(self):
1516 return self._seekable
1517
1518 def readable(self):
1519 return self.buffer.readable()
1520
1521 def writable(self):
1522 return self.buffer.writable()
1523
1524 def flush(self):
1525 self.buffer.flush()
1526 self._telling = self._seekable
1527
1528 def close(self):
Antoine Pitroufaf90072010-05-03 16:58:19 +00001529 if self.buffer is not None and not self.closed:
1530 self.flush()
Benjamin Petersond2e0c792009-05-01 20:40:59 +00001531 self.buffer.close()
Benjamin Peterson4fa88fa2009-03-04 00:14:51 +00001532
1533 @property
1534 def closed(self):
1535 return self.buffer.closed
1536
1537 @property
1538 def name(self):
1539 return self.buffer.name
1540
1541 def fileno(self):
1542 return self.buffer.fileno()
1543
1544 def isatty(self):
1545 return self.buffer.isatty()
1546
1547 def write(self, s: str):
1548 if self.closed:
1549 raise ValueError("write to closed file")
1550 if not isinstance(s, str):
1551 raise TypeError("can't write %s to text stream" %
1552 s.__class__.__name__)
1553 length = len(s)
1554 haslf = (self._writetranslate or self._line_buffering) and "\n" in s
1555 if haslf and self._writetranslate and self._writenl != "\n":
1556 s = s.replace("\n", self._writenl)
1557 encoder = self._encoder or self._get_encoder()
1558 # XXX What if we were just reading?
1559 b = encoder.encode(s)
1560 self.buffer.write(b)
1561 if self._line_buffering and (haslf or "\r" in s):
1562 self.flush()
1563 self._snapshot = None
1564 if self._decoder:
1565 self._decoder.reset()
1566 return length
1567
1568 def _get_encoder(self):
1569 make_encoder = codecs.getincrementalencoder(self._encoding)
1570 self._encoder = make_encoder(self._errors)
1571 return self._encoder
1572
1573 def _get_decoder(self):
1574 make_decoder = codecs.getincrementaldecoder(self._encoding)
1575 decoder = make_decoder(self._errors)
1576 if self._readuniversal:
1577 decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
1578 self._decoder = decoder
1579 return decoder
1580
1581 # The following three methods implement an ADT for _decoded_chars.
1582 # Text returned from the decoder is buffered here until the client
1583 # requests it by calling our read() or readline() method.
1584 def _set_decoded_chars(self, chars):
1585 """Set the _decoded_chars buffer."""
1586 self._decoded_chars = chars
1587 self._decoded_chars_used = 0
1588
1589 def _get_decoded_chars(self, n=None):
1590 """Advance into the _decoded_chars buffer."""
1591 offset = self._decoded_chars_used
1592 if n is None:
1593 chars = self._decoded_chars[offset:]
1594 else:
1595 chars = self._decoded_chars[offset:offset + n]
1596 self._decoded_chars_used += len(chars)
1597 return chars
1598
1599 def _rewind_decoded_chars(self, n):
1600 """Rewind the _decoded_chars buffer."""
1601 if self._decoded_chars_used < n:
1602 raise AssertionError("rewind decoded_chars out of bounds")
1603 self._decoded_chars_used -= n
1604
1605 def _read_chunk(self):
1606 """
1607 Read and decode the next chunk of data from the BufferedReader.
1608 """
1609
1610 # The return value is True unless EOF was reached. The decoded
1611 # string is placed in self._decoded_chars (replacing its previous
1612 # value). The entire input chunk is sent to the decoder, though
1613 # some of it may remain buffered in the decoder, yet to be
1614 # converted.
1615
1616 if self._decoder is None:
1617 raise ValueError("no decoder")
1618
1619 if self._telling:
1620 # To prepare for tell(), we need to snapshot a point in the
1621 # file where the decoder's input buffer is empty.
1622
1623 dec_buffer, dec_flags = self._decoder.getstate()
1624 # Given this, we know there was a valid snapshot point
1625 # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).
1626
1627 # Read a chunk, decode it, and put the result in self._decoded_chars.
1628 input_chunk = self.buffer.read1(self._CHUNK_SIZE)
1629 eof = not input_chunk
1630 self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
1631
1632 if self._telling:
1633 # At the snapshot point, len(dec_buffer) bytes before the read,
1634 # the next input to be decoded is dec_buffer + input_chunk.
1635 self._snapshot = (dec_flags, dec_buffer + input_chunk)
1636
1637 return not eof
1638
1639 def _pack_cookie(self, position, dec_flags=0,
1640 bytes_to_feed=0, need_eof=0, chars_to_skip=0):
1641 # The meaning of a tell() cookie is: seek to position, set the
1642 # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
1643 # into the decoder with need_eof as the EOF flag, then skip
1644 # chars_to_skip characters of the decoded result. For most simple
1645 # decoders, tell() will often just give a byte offset in the file.
1646 return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
1647 (chars_to_skip<<192) | bool(need_eof)<<256)
1648
1649 def _unpack_cookie(self, bigint):
1650 rest, position = divmod(bigint, 1<<64)
1651 rest, dec_flags = divmod(rest, 1<<64)
1652 rest, bytes_to_feed = divmod(rest, 1<<64)
1653 need_eof, chars_to_skip = divmod(rest, 1<<64)
1654 return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
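# Worked example (hypothetical field values, each assumed to fit in 64 bits,
# as they do in practice): packing position=10, dec_flags=1, bytes_to_feed=3,
# need_eof=0, chars_to_skip=2 produces the integer
#
#     10 | (1 << 64) | (3 << 128) | (2 << 192)
#
# and _unpack_cookie() recovers (10, 1, 3, 0, 2) from it by repeated
# divmod(..., 1 << 64).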
1655
1656 def tell(self):
1657 if not self._seekable:
1658 raise IOError("underlying stream is not seekable")
1659 if not self._telling:
1660 raise IOError("telling position disabled by next() call")
1661 self.flush()
1662 position = self.buffer.tell()
1663 decoder = self._decoder
1664 if decoder is None or self._snapshot is None:
1665 if self._decoded_chars:
1666 # This should never happen.
1667 raise AssertionError("pending decoded text")
1668 return position
1669
1670 # Skip backward to the snapshot point (see _read_chunk).
1671 dec_flags, next_input = self._snapshot
1672 position -= len(next_input)
1673
1674 # How many decoded characters have been used up since the snapshot?
1675 chars_to_skip = self._decoded_chars_used
1676 if chars_to_skip == 0:
1677 # We haven't moved from the snapshot point.
1678 return self._pack_cookie(position, dec_flags)
1679
1680 # Starting from the snapshot position, we will walk the decoder
1681 # forward until it gives us enough decoded characters.
1682 saved_state = decoder.getstate()
1683 try:
1684 # Note our initial start point.
1685 decoder.setstate((b'', dec_flags))
1686 start_pos = position
1687 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1688 need_eof = 0
1689
1690 # Feed the decoder one byte at a time. As we go, note the
1691 # nearest "safe start point" before the current location
1692 # (a point where the decoder has nothing buffered, so seek()
1693 # can safely start from there and advance to this location).
1694 next_byte = bytearray(1)
1695 for next_byte[0] in next_input:
1696 bytes_fed += 1
1697 chars_decoded += len(decoder.decode(next_byte))
1698 dec_buffer, dec_flags = decoder.getstate()
1699 if not dec_buffer and chars_decoded <= chars_to_skip:
1700 # Decoder buffer is empty, so this is a safe start point.
1701 start_pos += bytes_fed
1702 chars_to_skip -= chars_decoded
1703 start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
1704 if chars_decoded >= chars_to_skip:
1705 break
1706 else:
1707 # We didn't get enough decoded data; signal EOF to get more.
1708 chars_decoded += len(decoder.decode(b'', final=True))
1709 need_eof = 1
1710 if chars_decoded < chars_to_skip:
1711 raise IOError("can't reconstruct logical file position")
1712
1713 # The returned cookie corresponds to the last safe start point.
1714 return self._pack_cookie(
1715 start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
1716 finally:
1717 decoder.setstate(saved_state)
1718
1719 def truncate(self, pos=None):
1720 self.flush()
1721 if pos is None:
1722 pos = self.tell()
1723 return self.buffer.truncate(pos)
1724
1725 def detach(self):
1726 if self.buffer is None:
1727 raise ValueError("buffer is already detached")
1728 self.flush()
1729 buffer = self._buffer
1730 self._buffer = None
1731 return buffer
1732
1733 def seek(self, cookie, whence=0):
1734 if self.closed:
1735 raise ValueError("seek on closed file")
1736 if not self._seekable:
1737 raise IOError("underlying stream is not seekable")
1738 if whence == 1: # seek relative to current position
1739 if cookie != 0:
1740 raise IOError("can't do nonzero cur-relative seeks")
1741 # Seeking to the current position should attempt to
1742 # sync the underlying buffer with the current position.
1743 whence = 0
1744 cookie = self.tell()
1745 if whence == 2: # seek relative to end of file
1746 if cookie != 0:
1747 raise IOError("can't do nonzero end-relative seeks")
1748 self.flush()
1749 position = self.buffer.seek(0, 2)
1750 self._set_decoded_chars('')
1751 self._snapshot = None
1752 if self._decoder:
1753 self._decoder.reset()
1754 return position
1755 if whence != 0:
1756 raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
1757 (whence,))
1758 if cookie < 0:
1759 raise ValueError("negative seek position %r" % (cookie,))
1760 self.flush()
1761
1762 # The strategy of seek() is to go back to the safe start point
1763 # and replay the effect of read(chars_to_skip) from there.
1764 start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
1765 self._unpack_cookie(cookie)
1766
1767 # Seek back to the safe start point.
1768 self.buffer.seek(start_pos)
1769 self._set_decoded_chars('')
1770 self._snapshot = None
1771
1772 # Restore the decoder to its state from the safe start point.
1773 if cookie == 0 and self._decoder:
1774 self._decoder.reset()
1775 elif self._decoder or dec_flags or chars_to_skip:
1776 self._decoder = self._decoder or self._get_decoder()
1777 self._decoder.setstate((b'', dec_flags))
1778 self._snapshot = (dec_flags, b'')
1779
1780 if chars_to_skip:
1781 # Just like _read_chunk, feed the decoder and save a snapshot.
1782 input_chunk = self.buffer.read(bytes_to_feed)
1783 self._set_decoded_chars(
1784 self._decoder.decode(input_chunk, need_eof))
1785 self._snapshot = (dec_flags, input_chunk)
1786
1787 # Skip chars_to_skip of the decoded characters.
1788 if len(self._decoded_chars) < chars_to_skip:
1789 raise IOError("can't restore logical file position")
1790 self._decoded_chars_used = chars_to_skip
1791
1792 # Finally, reset the encoder (this only matters for proper BOM handling)
1793 try:
1794 encoder = self._encoder or self._get_encoder()
1795 except LookupError:
1796 # Some codecs don't provide an incremental encoder
1797 pass
1798 else:
1799 if cookie != 0:
1800 encoder.setstate(0)
1801 else:
1802 encoder.reset()
1803 return cookie
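# A hedged round-trip sketch of tell()/seek() (standard io behaviour; the
# cookie value itself is opaque and must not be interpreted by callers):
#
#     >>> import io
#     >>> raw = io.BytesIO("héllo\nwörld\n".encode("utf-8"))
#     >>> t = io.TextIOWrapper(raw, encoding="utf-8")
#     >>> t.readline()
#     'héllo\n'
#     >>> cookie = t.tell()
#     >>> t.readline()
#     'wörld\n'
#     >>> _ = t.seek(cookie)          # only cookies from tell() are valid
#     >>> t.readline()
#     'wörld\n'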
1804
1805 def read(self, n=None):
1806 self._checkReadable()
1807 if n is None:
1808 n = -1
1809 decoder = self._decoder or self._get_decoder()
1810 if n < 0:
1811 # Read everything.
1812 result = (self._get_decoded_chars() +
1813 decoder.decode(self.buffer.read(), final=True))
1814 self._set_decoded_chars('')
1815 self._snapshot = None
1816 return result
1817 else:
1818 # Keep reading chunks until we have n characters to return.
1819 eof = False
1820 result = self._get_decoded_chars(n)
1821 while len(result) < n and not eof:
1822 eof = not self._read_chunk()
1823 result += self._get_decoded_chars(n - len(result))
1824 return result
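# A short usage sketch of read() (standard io behaviour, not specific to
# this implementation):
#
#     >>> import io
#     >>> t = io.TextIOWrapper(io.BytesIO(b"abcdef"), encoding="ascii")
#     >>> t.read(2)       # keep reading chunks until 2 characters are decoded
#     'ab'
#     >>> t.read()        # n < 0 (or None): decode everything up to EOF
#     'cdef'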
1825
1826 def __next__(self):
1827 self._telling = False
1828 line = self.readline()
1829 if not line:
1830 self._snapshot = None
1831 self._telling = self._seekable
1832 raise StopIteration
1833 return line
1834
1835 def readline(self, limit=None):
1836 if self.closed:
1837 raise ValueError("read from closed file")
1838 if limit is None:
1839 limit = -1
1840 elif not isinstance(limit, int):
1841 raise TypeError("limit must be an integer")
1842
1843 # Grab all the decoded text (we will rewind any extra bits later).
1844 line = self._get_decoded_chars()
1845
1846 start = 0
1847 # Make the decoder if it doesn't already exist.
1848 if not self._decoder:
1849 self._get_decoder()
1850
1851 pos = endpos = None
1852 while True:
1853 if self._readtranslate:
1854 # Newlines are already translated, only search for \n
1855 pos = line.find('\n', start)
1856 if pos >= 0:
1857 endpos = pos + 1
1858 break
1859 else:
1860 start = len(line)
1861
1862 elif self._readuniversal:
1863 # Universal newline search. Find any of \r, \r\n, \n
1864 # The decoder ensures that \r\n is never split across two chunks
1865
1866 # In C we'd look for these in parallel of course.
1867 nlpos = line.find("\n", start)
1868 crpos = line.find("\r", start)
1869 if crpos == -1:
1870 if nlpos == -1:
1871 # Nothing found
1872 start = len(line)
1873 else:
1874 # Found \n
1875 endpos = nlpos + 1
1876 break
1877 elif nlpos == -1:
1878 # Found lone \r
1879 endpos = crpos + 1
1880 break
1881 elif nlpos < crpos:
1882 # Found \n
1883 endpos = nlpos + 1
1884 break
1885 elif nlpos == crpos + 1:
1886 # Found \r\n
1887 endpos = crpos + 2
1888 break
1889 else:
1890 # Found \r
1891 endpos = crpos + 1
1892 break
1893 else:
1894 # non-universal
1895 pos = line.find(self._readnl)
1896 if pos >= 0:
1897 endpos = pos + len(self._readnl)
1898 break
1899
1900 if limit >= 0 and len(line) >= limit:
1901 endpos = limit # reached length limit
1902 break
1903
1904 # No line ending seen yet - get more data.
1905 while self._read_chunk():
1906 if self._decoded_chars:
1907 break
1908 if self._decoded_chars:
1909 line += self._get_decoded_chars()
1910 else:
1911 # end of file
1912 self._set_decoded_chars('')
1913 self._snapshot = None
1914 return line
1915
1916 if limit >= 0 and endpos > limit:
1917 endpos = limit # don't exceed limit
1918
1919 # Rewind _decoded_chars to just after the line ending we found.
1920 self._rewind_decoded_chars(len(line) - endpos)
1921 return line[:endpos]
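# A sketch of the newline handling above (standard io behaviour). With
# newline=None every \r, \r\n and \n terminates a line and is translated to
# \n; with newline='' lines are split the same way but left untranslated:
#
#     >>> import io
#     >>> t = io.TextIOWrapper(io.BytesIO(b"a\rb\r\nc\nd"), encoding="ascii",
#     ...                      newline=None)
#     >>> [t.readline() for _ in range(4)]
#     ['a\n', 'b\n', 'c\n', 'd']
#     >>> t2 = io.TextIOWrapper(io.BytesIO(b"a\rb\r\nc\nd"), encoding="ascii",
#     ...                       newline='')
#     >>> [t2.readline() for _ in range(4)]
#     ['a\r', 'b\r\n', 'c\n', 'd']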
1922
1923 @property
1924 def newlines(self):
1925 return self._decoder.newlines if self._decoder else None
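# For example, after the streams in the readline() sketch above have been
# read to the end, their newlines property reports each line-ending style
# seen so far: a single string for one style, or a tuple such as
# ('\r', '\n', '\r\n') when several were encountered. It stays None until
# the first line ending has been decoded.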
1926
1927
1928class StringIO(TextIOWrapper):
1929 """Text I/O implementation using an in-memory buffer.
1930
1931 The initial_value argument sets the initial value of the object. The
1932 newline argument has the same meaning as in TextIOWrapper's constructor.
1933 """
1934
1935 def __init__(self, initial_value="", newline="\n"):
1936 super(StringIO, self).__init__(BytesIO(),
1937 encoding="utf-8",
1938 errors="strict",
1939 newline=newline)
1940 # Issue #5645: make universal newlines semantics the same as in the
1941 # C version, even under Windows.
1942 if newline is None:
1943 self._writetranslate = False
1944 if initial_value is not None:
1945 if not isinstance(initial_value, str):
1946 raise TypeError("initial_value must be str or None, not {0}"
1947 .format(type(initial_value).__name__))
1948 initial_value = str(initial_value)
1949 self.write(initial_value)
1950 self.seek(0)
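# Illustrative usage of this class (a sketch of the public API; the C
# implementation in the io module behaves the same way for these cases):
#
#     >>> s = StringIO()
#     >>> s.write("hello ")
#     6
#     >>> s.write("world\n")
#     6
#     >>> s.getvalue()
#     'hello world\n'
#     >>> list(StringIO("a\nb\n"))
#     ['a\n', 'b\n']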
1951
1952 def getvalue(self):
1953 self.flush()
1954 return self.buffer.getvalue().decode(self._encoding, self._errors)
1955
1956 def __repr__(self):
1957 # TextIOWrapper reports the encoding in its repr. In StringIO,
1958 # that's an implementation detail.
1959 return object.__repr__(self)
1960
1961 @property
1962 def errors(self):
1963 return None
1964
1965 @property
1966 def encoding(self):
1967 return None
1968
1969 def detach(self):
1970 # This doesn't make sense on StringIO.
1971 self._unsupported("detach")