"""
Python implementation of the io module.
"""

from __future__ import (print_function, unicode_literals)

import os
import abc
import codecs
import warnings
# Import thread instead of threading to reduce startup cost
try:
    from thread import allocate_lock as Lock
except ImportError:
    from dummy_thread import allocate_lock as Lock

import io
from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END)

__metaclass__ = type

# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024  # bytes

# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.


class BlockingIOError(IOError):

    """Exception raised when I/O would block on a non-blocking I/O stream."""

    def __init__(self, errno, strerror, characters_written=0):
        super(IOError, self).__init__(errno, strerror)
        if not isinstance(characters_written, (int, long)):
            raise TypeError("characters_written must be an integer")
        self.characters_written = characters_written


def open(file, mode="r", buffering=-1,
         encoding=None, errors=None,
         newline=None, closefd=True):

    r"""Open file and return a stream. Raise IOError upon failure.

    file is either a text or byte string giving the name (and the path
    if the file isn't in the current working directory) of the file to
    be opened or an integer file descriptor of the file to be
    wrapped. (If a file descriptor is given, it is closed when the
    returned I/O object is closed, unless closefd is set to False.)

    mode is an optional string that specifies the mode in which the file
    is opened. It defaults to 'r' which means open for reading in text
    mode. Other common values are 'w' for writing (truncating the file if
    it already exists), and 'a' for appending (which on some Unix systems,
    means that all writes append to the end of the file regardless of the
    current seek position). In text mode, if encoding is not specified the
    encoding used is platform dependent. (For reading and writing raw
    bytes use binary mode and leave encoding unspecified.) The available
    modes are:

    ========= ===============================================================
    Character Meaning
    --------- ---------------------------------------------------------------
    'r'       open for reading (default)
    'w'       open for writing, truncating the file first
    'a'       open for writing, appending to the end of the file if it exists
    'b'       binary mode
    't'       text mode (default)
    '+'       open a disk file for updating (reading and writing)
    'U'       universal newline mode (for backwards compatibility; unneeded
              for new code)
    ========= ===============================================================

    The default mode is 'rt' (open for reading text). For binary random
    access, the mode 'w+b' opens and truncates the file to 0 bytes, while
    'r+b' opens the file without truncation.

    Python distinguishes between files opened in binary and text modes,
    even when the underlying operating system doesn't. Files opened in
    binary mode (appending 'b' to the mode argument) return contents as
    bytes objects without any decoding. In text mode (the default, or when
    't' is appended to the mode argument), the contents of the file are
    returned as strings, the bytes having been first decoded using a
    platform-dependent encoding or using the specified encoding if given.

    buffering is an optional integer used to set the buffering policy.
    Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
    line buffering (only usable in text mode), and an integer > 1 to indicate
    the size of a fixed-size chunk buffer. When no buffering argument is
    given, the default buffering policy works as follows:

    * Binary files are buffered in fixed-size chunks; the size of the buffer
      is chosen using a heuristic trying to determine the underlying device's
      "block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
      On many systems, the buffer will typically be 4096 or 8192 bytes long.

    * "Interactive" text files (files for which isatty() returns True)
      use line buffering. Other text files use the policy described above
      for binary files.

    encoding is the name of the encoding used to decode or encode the
    file. This should only be used in text mode. The default encoding is
    platform dependent, but any encoding supported by Python can be
    passed. See the codecs module for the list of supported encodings.

    errors is an optional string that specifies how encoding errors are to
    be handled---this argument should not be used in binary mode. Pass
    'strict' to raise a ValueError exception if there is an encoding error
    (the default of None has the same effect), or pass 'ignore' to ignore
    errors. (Note that ignoring encoding errors can lead to data loss.)
    See the documentation for codecs.register for a list of the permitted
    encoding error strings.

    newline controls how universal newlines works (it only applies to text
    mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
    follows:

    * On input, if newline is None, universal newlines mode is
      enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
      these are translated into '\n' before being returned to the
      caller. If it is '', universal newline mode is enabled, but line
      endings are returned to the caller untranslated. If it has any of
      the other legal values, input lines are only terminated by the given
      string, and the line ending is returned to the caller untranslated.

    * On output, if newline is None, any '\n' characters written are
      translated to the system default line separator, os.linesep. If
      newline is '', no translation takes place. If newline is any of the
      other legal values, any '\n' characters written are translated to
      the given string.

    If closefd is False, the underlying file descriptor will be kept open
    when the file is closed. This does not work when a file name is given
    and must be True in that case.

    open() returns a file object whose type depends on the mode, and
    through which the standard file operations such as reading and writing
    are performed. When open() is used to open a file in a text mode ('w',
    'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
    a file in a binary mode, the returned class varies: in read binary
    mode, it returns a BufferedReader; in write binary and append binary
    modes, it returns a BufferedWriter, and in read/write mode, it returns
    a BufferedRandom.

    It is also possible to use a string or bytearray as a file for both
    reading and writing. For strings StringIO can be used like a file
    opened in a text mode, and for bytes a BytesIO can be used like a file
    opened in a binary mode.
    """
    if not isinstance(file, (basestring, int, long)):
        raise TypeError("invalid file: %r" % file)
    if not isinstance(mode, basestring):
        raise TypeError("invalid mode: %r" % mode)
    if not isinstance(buffering, (int, long)):
        raise TypeError("invalid buffering: %r" % buffering)
    if encoding is not None and not isinstance(encoding, basestring):
        raise TypeError("invalid encoding: %r" % encoding)
    if errors is not None and not isinstance(errors, basestring):
        raise TypeError("invalid errors: %r" % errors)
    modes = set(mode)
    if modes - set("arwb+tU") or len(mode) > len(modes):
        raise ValueError("invalid mode: %r" % mode)
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    updating = "+" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        if writing or appending:
            raise ValueError("can't use U and writing mode at once")
        reading = True
    if text and binary:
        raise ValueError("can't have text and binary mode at once")
    if reading + writing + appending > 1:
        raise ValueError("can't have read/write/append mode at once")
    if not (reading or writing or appending):
        raise ValueError("must have exactly one of read/write/append mode")
    if binary and encoding is not None:
        raise ValueError("binary mode doesn't take an encoding argument")
    if binary and errors is not None:
        raise ValueError("binary mode doesn't take an errors argument")
    if binary and newline is not None:
        raise ValueError("binary mode doesn't take a newline argument")
    raw = FileIO(file,
                 (reading and "r" or "") +
                 (writing and "w" or "") +
                 (appending and "a" or "") +
                 (updating and "+" or ""),
                 closefd)
    line_buffering = False
    if buffering == 1 or buffering < 0 and raw.isatty():
        buffering = -1
        line_buffering = True
    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        try:
            bs = os.fstat(raw.fileno()).st_blksize
        except (os.error, AttributeError):
            pass
        else:
            if bs > 1:
                buffering = bs
    if buffering < 0:
        raise ValueError("invalid buffering size")
    if buffering == 0:
        if binary:
            return raw
        raise ValueError("can't have unbuffered text I/O")
    if updating:
        buffer = BufferedRandom(raw, buffering)
    elif writing or appending:
        buffer = BufferedWriter(raw, buffering)
    elif reading:
        buffer = BufferedReader(raw, buffering)
    else:
        raise ValueError("unknown mode: %r" % mode)
    if binary:
        return buffer
    text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
    text.mode = mode
    return text
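
# A minimal usage sketch of the mode-to-class mapping described above,
# assuming a writable file "notes.txt" in the current directory (the file
# name is only an illustration):
#
#     with open("notes.txt", "w") as f:           # text mode -> TextIOWrapper
#         f.write(u"Spam and eggs!\n")
#     with open("notes.txt", "rb") as f:          # binary read -> BufferedReader
#         data = f.read()
#     with open("notes.txt", "r+b") as f:         # binary read/write -> BufferedRandom
#         first = f.read(4)
#     raw = open("notes.txt", "rb", buffering=0)  # unbuffered binary -> FileIO
#     raw.close()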


class DocDescriptor:
    """Helper for builtins.open.__doc__
    """
    def __get__(self, obj, typ):
        return (
            "open(file, mode='r', buffering=-1, encoding=None, "
            "errors=None, newline=None, closefd=True)\n\n" +
            open.__doc__)

class OpenWrapper:
    """Wrapper for builtins.open

    Trick so that open won't become a bound method when stored
    as a class variable (as dbm.dumb does).

    See initstdio() in Python/pythonrun.c.
    """
    __doc__ = DocDescriptor()

    def __new__(cls, *args, **kwargs):
        return open(*args, **kwargs)


class UnsupportedOperation(ValueError, IOError):
    pass


class IOBase:
    __metaclass__ = abc.ABCMeta

    """The abstract base class for all I/O classes, acting on streams of
    bytes. There is no public constructor.

    This class provides dummy implementations for many methods that
    derived classes can override selectively; the default implementations
    represent a file that cannot be read, written or seeked.

    Even though IOBase does not declare read, readinto, or write because
    their signatures will vary, implementations and clients should
    consider those methods part of the interface. Also, implementations
    may raise an IOError when operations they do not support are called.

    The basic type used for binary data read from or written to a file is
    bytes. bytearrays are accepted too, and in some cases (such as
    readinto) needed. Text I/O classes work with str data.

    Note that calling any method (even inquiries) on a closed stream is
    undefined. Implementations may raise IOError in this case.

    IOBase (and its subclasses) support the iterator protocol, meaning
    that an IOBase object can be iterated over yielding the lines in a
    stream.

    IOBase also supports the :keyword:`with` statement. In this example,
    fp is closed after the suite of the with statement is complete:

    with open('spam.txt', 'w') as fp:
        fp.write('Spam and eggs!')
285 """
286
287 ### Internal ###
288
289 def _unsupported(self, name):
290 """Internal: raise an exception for unsupported operations."""
291 raise UnsupportedOperation("%s.%s() not supported" %
292 (self.__class__.__name__, name))
293
294 ### Positioning ###
295
296 def seek(self, pos, whence=0):
297 """Change stream position.
298
        Change the stream position to the given byte offset. The offset is
        interpreted relative to the position indicated by whence. Values
        for whence are:

        * 0 -- start of stream (the default); offset should be zero or positive
        * 1 -- current stream position; offset may be negative
        * 2 -- end of stream; offset is usually negative

        Return the new absolute position.
        """
        self._unsupported("seek")

    def tell(self):
        """Return current stream position."""
        return self.seek(0, 1)

    def truncate(self, pos=None):
        """Truncate file to size bytes.

        Size defaults to the current IO position as reported by tell(). Return
        the new size.
        """
        self._unsupported("truncate")

    ### Flush and close ###

    def flush(self):
        """Flush write buffers, if applicable.

        This is not implemented for read-only and non-blocking streams.
        """
        self._checkClosed()
        # XXX Should this return the number of bytes written???

    __closed = False

    def close(self):
        """Flush and close the IO object.

        This method has no effect if the file is already closed.
        """
        if not self.__closed:
            self.flush()
            self.__closed = True

    def __del__(self):
        """Destructor. Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail. Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass

    ### Inquiries ###

    def seekable(self):
        """Return whether object supports random access.

        If False, seek(), tell() and truncate() will raise IOError.
        This method may need to do a test seek().
        """
        return False

    def _checkSeekable(self, msg=None):
        """Internal: raise an IOError if file is not seekable
        """
        if not self.seekable():
            raise IOError("File or stream is not seekable."
                          if msg is None else msg)


    def readable(self):
        """Return whether object was opened for reading.

        If False, read() will raise IOError.
        """
        return False

    def _checkReadable(self, msg=None):
        """Internal: raise an IOError if file is not readable
        """
        if not self.readable():
            raise IOError("File or stream is not readable."
                          if msg is None else msg)

    def writable(self):
        """Return whether object was opened for writing.

        If False, write() and truncate() will raise IOError.
        """
        return False

    def _checkWritable(self, msg=None):
        """Internal: raise an IOError if file is not writable
        """
        if not self.writable():
            raise IOError("File or stream is not writable."
                          if msg is None else msg)

    @property
    def closed(self):
        """closed: bool. True iff the file has been closed.

        For backwards compatibility, this is a property, not a predicate.
        """
        return self.__closed

    def _checkClosed(self, msg=None):
411 """Internal: raise an ValueError if file is closed
412 """
413 if self.closed:
414 raise ValueError("I/O operation on closed file."
415 if msg is None else msg)
416
417 ### Context manager ###
418
419 def __enter__(self):
420 """Context management protocol. Returns self."""
421 self._checkClosed()
422 return self
423
424 def __exit__(self, *args):
425 """Context management protocol. Calls close()"""
426 self.close()
427
428 ### Lower-level APIs ###
429
430 # XXX Should these be present even if unimplemented?
431
432 def fileno(self):
433 """Returns underlying file descriptor if one exists.
434
435 An IOError is raised if the IO object does not use a file descriptor.
436 """
437 self._unsupported("fileno")
438
439 def isatty(self):
440 """Return whether this is an 'interactive' stream.
441
442 Return False if it can't be determined.
443 """
444 self._checkClosed()
445 return False
446
447 ### Readline[s] and writelines ###
448
449 def readline(self, limit=-1):
450 r"""Read and return a line from the stream.
451
452 If limit is specified, at most limit bytes will be read.
453
454 The line terminator is always b'\n' for binary files; for text
455 files, the newlines argument to open can be used to select the line
456 terminator(s) recognized.
457 """
458 # For backwards compatibility, a (slowish) readline().
459 if hasattr(self, "peek"):
460 def nreadahead():
461 readahead = self.peek(1)
462 if not readahead:
463 return 1
464 n = (readahead.find(b"\n") + 1) or len(readahead)
465 if limit >= 0:
466 n = min(n, limit)
467 return n
468 else:
469 def nreadahead():
470 return 1
471 if limit is None:
472 limit = -1
473 elif not isinstance(limit, (int, long)):
474 raise TypeError("limit must be an integer")
475 res = bytearray()
476 while limit < 0 or len(res) < limit:
477 b = self.read(nreadahead())
478 if not b:
479 break
480 res += b
481 if res.endswith(b"\n"):
482 break
483 return bytes(res)
484
485 def __iter__(self):
486 self._checkClosed()
487 return self
488
489 def next(self):
490 line = self.readline()
491 if not line:
492 raise StopIteration
493 return line
494
495 def readlines(self, hint=None):
496 """Return a list of lines from the stream.
497
498 hint can be specified to control the number of lines read: no more
499 lines will be read if the total size (in bytes/characters) of all
500 lines so far exceeds hint.
501 """
502 if hint is not None and not isinstance(hint, (int, long)):
503 raise TypeError("integer or None expected")
504 if hint is None or hint <= 0:
505 return list(self)
506 n = 0
507 lines = []
508 for line in self:
509 lines.append(line)
510 n += len(line)
511 if n >= hint:
512 break
513 return lines
514
515 def writelines(self, lines):
516 self._checkClosed()
517 for line in lines:
518 self.write(line)
519
520io.IOBase.register(IOBase)
521
522
523class RawIOBase(IOBase):
524
525 """Base class for raw binary I/O."""
526
527 # The read() method is implemented by calling readinto(); derived
528 # classes that want to support read() only need to implement
529 # readinto() as a primitive operation. In general, readinto() can be
530 # more efficient than read().
531
532 # (It would be tempting to also provide an implementation of
533 # readinto() in terms of read(), in case the latter is a more suitable
534 # primitive operation, but that would lead to nasty recursion in case
535 # a subclass doesn't implement either.)
536
537 def read(self, n=-1):
538 """Read and return up to n bytes.
539
540 Returns an empty bytes object on EOF, or None if the object is
541 set not to block and has no data to read.
542 """
543 if n is None:
544 n = -1
545 if n < 0:
546 return self.readall()
547 b = bytearray(n.__index__())
548 n = self.readinto(b)
Antoine Pitrou6391b342010-09-14 18:48:19 +0000549 if n is None:
550 return None
Antoine Pitrou19690592009-06-12 20:14:08 +0000551 del b[n:]
552 return bytes(b)
553
554 def readall(self):
555 """Read until EOF, using multiple read() call."""
        res = bytearray()
        while True:
            data = self.read(DEFAULT_BUFFER_SIZE)
            if not data:
                break
            res += data
        return bytes(res)

    def readinto(self, b):
        """Read up to len(b) bytes into b.

        Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
        """
        self._unsupported("readinto")

    def write(self, b):
        """Write the given buffer to the IO stream.

        Returns the number of bytes written, which may be less than len(b).
        """
        self._unsupported("write")

io.RawIOBase.register(RawIOBase)
from _io import FileIO
RawIOBase.register(FileIO)
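
# A minimal sketch of the pattern noted above: a raw stream only has to
# provide readinto() (plus the readable() inquiry) and inherits read() from
# RawIOBase. "ZeroRawIO" is a hypothetical example class, not part of this
# module.
#
#     class ZeroRawIO(RawIOBase):
#         """Raw stream yielding an endless supply of NUL bytes."""
#         def readable(self):
#             return True
#         def readinto(self, b):
#             b[:] = b"\x00" * len(b)   # fill the caller's buffer in place
#             return len(b)             # number of bytes "read"
#
#     # ZeroRawIO().read(4) == b'\x00\x00\x00\x00', served via readinto()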


class BufferedIOBase(IOBase):

    """Base class for buffered IO objects.

    The main difference with RawIOBase is that the read() method
    supports omitting the size argument, and does not have a default
    implementation that defers to readinto().

    In addition, read(), readinto() and write() may raise
    BlockingIOError if the underlying raw stream is in non-blocking
    mode and not ready; unlike their raw counterparts, they will never
    return None.

    A typical implementation should not inherit from a RawIOBase
    implementation, but wrap one.
    """

    def read(self, n=None):
        """Read and return up to n bytes.

        If the argument is omitted, None, or negative, reads and
        returns all data until EOF.

        If the argument is positive, and the underlying raw stream is
        not 'interactive', multiple raw reads may be issued to satisfy
        the byte count (unless EOF is reached first). But for
        interactive raw streams (XXX and for pipes?), at most one raw
        read will be issued, and a short result does not imply that
        EOF is imminent.

        Returns an empty bytes array on EOF.

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        self._unsupported("read")

    def read1(self, n=None):
        """Read up to n bytes with at most one read() system call."""
        self._unsupported("read1")

    def readinto(self, b):
        """Read up to len(b) bytes into b.

        Like read(), this may issue multiple reads to the underlying raw
        stream, unless the latter is 'interactive'.

        Returns the number of bytes read (0 for EOF).

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        # XXX This ought to work with anything that supports the buffer API
        data = self.read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            import array
            if not isinstance(b, array.array):
                raise err
            b[:n] = array.array(b'b', data)
        return n

    def write(self, b):
        """Write the given buffer to the IO stream.

        Return the number of bytes written, which is never less than
        len(b).

        Raises BlockingIOError if the buffer is full and the
        underlying raw stream cannot accept more data at the moment.
        """
        self._unsupported("write")

    def detach(self):
        """
        Separate the underlying raw stream from the buffer and return it.

        After the raw stream has been detached, the buffer is in an unusable
        state.
        """
        self._unsupported("detach")

io.BufferedIOBase.register(BufferedIOBase)


class _BufferedIOMixin(BufferedIOBase):

    """A mixin implementation of BufferedIOBase with an underlying raw stream.

    This passes most requests on to the underlying raw stream. It
    does *not* provide implementations of read(), readinto() or
    write().
    """

    def __init__(self, raw):
        self._raw = raw

    ### Positioning ###

    def seek(self, pos, whence=0):
        new_position = self.raw.seek(pos, whence)
        if new_position < 0:
            raise IOError("seek() returned an invalid position")
        return new_position

    def tell(self):
        pos = self.raw.tell()
        if pos < 0:
            raise IOError("tell() returned an invalid position")
        return pos

    def truncate(self, pos=None):
        # Flush the stream. We're mixing buffered I/O with lower-level I/O,
        # and a flush may be necessary to synch both views of the current
        # file state.
        self.flush()

        if pos is None:
            pos = self.tell()
        # XXX: Should seek() be used, instead of passing the position
        # XXX  directly to truncate?
        return self.raw.truncate(pos)

    ### Flush and close ###

    def flush(self):
        if self.closed:
            raise ValueError("flush of closed file")
        self.raw.flush()

    def close(self):
        if self.raw is not None and not self.closed:
            self.flush()
            self.raw.close()

    def detach(self):
        if self.raw is None:
            raise ValueError("raw stream already detached")
        self.flush()
        raw = self._raw
        self._raw = None
        return raw

    ### Inquiries ###

    def seekable(self):
        return self.raw.seekable()

    def readable(self):
        return self.raw.readable()

    def writable(self):
        return self.raw.writable()

    @property
    def raw(self):
        return self._raw

    @property
    def closed(self):
        return self.raw.closed

    @property
    def name(self):
        return self.raw.name

    @property
    def mode(self):
        return self.raw.mode

    def __repr__(self):
        clsname = self.__class__.__name__
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.{0}>".format(clsname)
        else:
            return "<_pyio.{0} name={1!r}>".format(clsname, name)

    ### Lower-level APIs ###

    def fileno(self):
        return self.raw.fileno()

    def isatty(self):
        return self.raw.isatty()


class BytesIO(BufferedIOBase):

    """Buffered I/O implementation using an in-memory bytes buffer."""

    def __init__(self, initial_bytes=None):
        buf = bytearray()
        if initial_bytes is not None:
            buf.extend(initial_bytes)
        self._buffer = buf
        self._pos = 0

    def __getstate__(self):
        if self.closed:
            raise ValueError("__getstate__ on closed file")
        return self.__dict__.copy()

    def getvalue(self):
        """Return the bytes value (contents) of the buffer
        """
        if self.closed:
            raise ValueError("getvalue on closed file")
        return bytes(self._buffer)

    def read(self, n=None):
        if self.closed:
            raise ValueError("read from closed file")
        if n is None:
            n = -1
        if not isinstance(n, (int, long)):
            raise TypeError("integer argument expected, got {0!r}".format(
                type(n)))
        if n < 0:
            n = len(self._buffer)
        if len(self._buffer) <= self._pos:
            return b""
        newpos = min(len(self._buffer), self._pos + n)
        b = self._buffer[self._pos : newpos]
        self._pos = newpos
        return bytes(b)

    def read1(self, n):
        """This is the same as read.
        """
        return self.read(n)

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, unicode):
            raise TypeError("can't write unicode to binary stream")
        n = len(b)
        if n == 0:
            return 0
        pos = self._pos
        if pos > len(self._buffer):
            # Inserts null bytes between the current end of the file
            # and the new write position.
            padding = b'\x00' * (pos - len(self._buffer))
            self._buffer += padding
        self._buffer[pos:pos + n] = b
        self._pos += n
        return n

    def seek(self, pos, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        try:
            pos.__index__
        except AttributeError:
            raise TypeError("an integer is required")
        if whence == 0:
            if pos < 0:
                raise ValueError("negative seek position %r" % (pos,))
            self._pos = pos
        elif whence == 1:
            self._pos = max(0, self._pos + pos)
        elif whence == 2:
            self._pos = max(0, len(self._buffer) + pos)
        else:
            raise ValueError("invalid whence value")
        return self._pos

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file")
        return self._pos

    def truncate(self, pos=None):
        if self.closed:
            raise ValueError("truncate on closed file")
        if pos is None:
            pos = self._pos
        else:
            try:
                pos.__index__
            except AttributeError:
                raise TypeError("an integer is required")
            if pos < 0:
                raise ValueError("negative truncate position %r" % (pos,))
        del self._buffer[pos:]
        return pos

    def readable(self):
        return True

    def writable(self):
        return True

    def seekable(self):
        return True
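
# A small usage sketch of the in-memory buffer above (the values shown follow
# from the read/write/seek code in this class):
#
#     buf = BytesIO(b"abcdef")
#     buf.read(2)       # -> b'ab'
#     buf.seek(0, 2)    # -> 6, move to the end of the buffer
#     buf.write(b"!")   # -> 1, appends
#     buf.seek(10)      # seeking past the end is allowed...
#     buf.write(b"x")   # ...and write() pads the gap with b'\x00' bytes
#     buf.getvalue()    # -> b'abcdef!\x00\x00\x00x'
#     buf.close()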


class BufferedReader(_BufferedIOMixin):

    """BufferedReader(raw[, buffer_size])

    A buffer for a readable, sequential RawIOBase object.

    The constructor creates a BufferedReader for the given readable raw
    stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
    is used.
    """

    def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
        """Create a new buffered reader using the given readable raw IO object.
        """
        if not raw.readable():
            raise IOError('"raw" argument must be readable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        self.buffer_size = buffer_size
        self._reset_read_buf()
        self._read_lock = Lock()

    def _reset_read_buf(self):
        self._read_buf = b""
        self._read_pos = 0

    def read(self, n=None):
        """Read n bytes.

        Returns exactly n bytes of data unless the underlying raw IO
        stream reaches EOF or if the call would block in non-blocking
        mode. If n is negative, read until EOF or until read() would
        block.
        """
        if n is not None and n < -1:
            raise ValueError("invalid number of bytes to read")
        with self._read_lock:
            return self._read_unlocked(n)

    def _read_unlocked(self, n=None):
        nodata_val = b""
        empty_values = (b"", None)
        buf = self._read_buf
        pos = self._read_pos

        # Special case for when the number of bytes to read is unspecified.
        if n is None or n == -1:
            self._reset_read_buf()
            chunks = [buf[pos:]]  # Strip the consumed bytes.
            current_size = 0
            while True:
                # Read until EOF or until read() would block.
                chunk = self.raw.read()
                if chunk in empty_values:
                    nodata_val = chunk
                    break
                current_size += len(chunk)
                chunks.append(chunk)
            return b"".join(chunks) or nodata_val

        # The number of bytes to read is specified, return at most n bytes.
        avail = len(buf) - pos  # Length of the available buffered data.
        if n <= avail:
            # Fast path: the data to read is fully buffered.
            self._read_pos += n
            return buf[pos:pos+n]
        # Slow path: read from the stream until enough bytes are read,
        # or until an EOF occurs or until read() would block.
        chunks = [buf[pos:]]
        wanted = max(self.buffer_size, n)
        while avail < n:
            chunk = self.raw.read(wanted)
            if chunk in empty_values:
                nodata_val = chunk
                break
            avail += len(chunk)
            chunks.append(chunk)
        # n is more than avail only when an EOF occurred or when
        # read() would have blocked.
        n = min(n, avail)
        out = b"".join(chunks)
        self._read_buf = out[n:]  # Save the extra data in the buffer.
        self._read_pos = 0
        return out[:n] if out else nodata_val

    def peek(self, n=0):
        """Returns buffered bytes without advancing the position.

        The argument indicates a desired minimal number of bytes; we
        do at most one raw read to satisfy it. We never return more
        than self.buffer_size.
        """
        with self._read_lock:
            return self._peek_unlocked(n)

    def _peek_unlocked(self, n=0):
        want = min(n, self.buffer_size)
        have = len(self._read_buf) - self._read_pos
        if have < want or have <= 0:
            to_read = self.buffer_size - have
            current = self.raw.read(to_read)
            if current:
                self._read_buf = self._read_buf[self._read_pos:] + current
                self._read_pos = 0
        return self._read_buf[self._read_pos:]

    def read1(self, n):
        """Reads up to n bytes, with at most one read() system call."""
        # Returns up to n bytes. If at least one byte is buffered, we
        # only return buffered bytes. Otherwise, we do one raw read.
        if n < 0:
            raise ValueError("number of bytes to read must be positive")
        if n == 0:
            return b""
        with self._read_lock:
            self._peek_unlocked(1)
            return self._read_unlocked(
                min(n, len(self._read_buf) - self._read_pos))

    def tell(self):
        return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence value")
        with self._read_lock:
            if whence == 1:
                pos -= len(self._read_buf) - self._read_pos
            pos = _BufferedIOMixin.seek(self, pos, whence)
            self._reset_read_buf()
            return pos
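
# A short sketch of peek()/read1() above. BytesIO stands in for the raw
# stream; it is not a RawIOBase, but it offers the readable()/read() calls
# the buffer uses, so it works for illustration:
#
#     reader = BufferedReader(BytesIO(b"line one\nline two\n"))
#     reader.peek(1)     # returns already-buffered bytes, position unchanged
#     reader.read1(4)    # -> b'line', served from the internal buffer
#     reader.readline()  # -> b' one\n', via the generic IOBase.readline()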

class BufferedWriter(_BufferedIOMixin):

    """A buffer for a writeable sequential RawIO object.

    The constructor creates a BufferedWriter for the given writeable raw
    stream. If the buffer_size is not given, it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 2

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        if not raw.writable():
            raise IOError('"raw" argument must be writable.')

        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                          self._warning_stack_offset)
        self.buffer_size = buffer_size
        self._write_buf = bytearray()
        self._write_lock = Lock()

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, unicode):
            raise TypeError("can't write unicode to binary stream")
        with self._write_lock:
            # XXX we can implement some more tricks to try and avoid
            # partial writes
            if len(self._write_buf) > self.buffer_size:
                # We're full, so let's pre-flush the buffer
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    # We can't accept anything else.
                    # XXX Why not just let the exception pass through?
                    raise BlockingIOError(e.errno, e.strerror, 0)
            before = len(self._write_buf)
            self._write_buf.extend(b)
            written = len(self._write_buf) - before
            if len(self._write_buf) > self.buffer_size:
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    if len(self._write_buf) > self.buffer_size:
                        # We've hit the buffer_size. We have to accept a partial
                        # write and cut back our buffer.
                        overage = len(self._write_buf) - self.buffer_size
                        written -= overage
                        self._write_buf = self._write_buf[:self.buffer_size]
                    raise BlockingIOError(e.errno, e.strerror, written)
            return written

    def truncate(self, pos=None):
        with self._write_lock:
            self._flush_unlocked()
            if pos is None:
                pos = self.raw.tell()
            return self.raw.truncate(pos)

    def flush(self):
        with self._write_lock:
            self._flush_unlocked()

    def _flush_unlocked(self):
        if self.closed:
            raise ValueError("flush of closed file")
        written = 0
        try:
            while self._write_buf:
                n = self.raw.write(self._write_buf)
                if n > len(self._write_buf) or n < 0:
                    raise IOError("write() returned incorrect number of bytes")
                del self._write_buf[:n]
                written += n
        except BlockingIOError as e:
            n = e.characters_written
            del self._write_buf[:n]
            written += n
            raise BlockingIOError(e.errno, e.strerror, written)

    def tell(self):
        return _BufferedIOMixin.tell(self) + len(self._write_buf)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        with self._write_lock:
            self._flush_unlocked()
            return _BufferedIOMixin.seek(self, pos, whence)
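
# A brief sketch of the buffering behaviour above: writes accumulate in the
# internal bytearray and only reach the raw stream on flush() or when the
# buffer overflows buffer_size. BytesIO stands in for the raw stream here.
#
#     raw = BytesIO()
#     writer = BufferedWriter(raw, buffer_size=16)
#     writer.write(b"abc")   # -> 3, data is held in the write buffer
#     raw.getvalue()         # -> b'', nothing has reached the raw stream yet
#     writer.flush()
#     raw.getvalue()         # -> b'abc'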


class BufferedRWPair(BufferedIOBase):

    """A buffered reader and writer object together.

    A buffered reader object and buffered writer object put together to
    form a sequential IO object that can read and write. This is typically
    used with a socket or two-way pipe.

    reader and writer are RawIOBase objects that are readable and
    writeable respectively. If the buffer_size is omitted it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    # XXX The usefulness of this (compared to having two separate IO
    # objects) is questionable.

    def __init__(self, reader, writer,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        """Constructor.

        The arguments are two RawIO instances.
        """
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)

        if not reader.readable():
            raise IOError('"reader" argument must be readable.')

        if not writer.writable():
            raise IOError('"writer" argument must be writable.')

        self.reader = BufferedReader(reader, buffer_size)
        self.writer = BufferedWriter(writer, buffer_size)

    def read(self, n=None):
        if n is None:
            n = -1
        return self.reader.read(n)

    def readinto(self, b):
        return self.reader.readinto(b)

    def write(self, b):
        return self.writer.write(b)

    def peek(self, n=0):
        return self.reader.peek(n)

    def read1(self, n):
        return self.reader.read1(n)

    def readable(self):
        return self.reader.readable()

    def writable(self):
        return self.writer.writable()

    def flush(self):
        return self.writer.flush()

    def close(self):
        self.writer.close()
        self.reader.close()

    def isatty(self):
        return self.reader.isatty() or self.writer.isatty()

    @property
    def closed(self):
        return self.writer.closed


class BufferedRandom(BufferedWriter, BufferedReader):

    """A buffered interface to random access streams.

    The constructor creates a reader and writer for a seekable stream,
    raw, given in the first argument. If the buffer_size is omitted it
    defaults to DEFAULT_BUFFER_SIZE.
    """

    _warning_stack_offset = 3

    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        raw._checkSeekable()
        BufferedReader.__init__(self, raw, buffer_size)
        BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)

    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        self.flush()
        if self._read_buf:
            # Undo read ahead.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
        # First do the raw seek, then empty the read buffer, so that
        # if the raw seek fails, we don't lose buffered data forever.
        pos = self.raw.seek(pos, whence)
        with self._read_lock:
            self._reset_read_buf()
        if pos < 0:
            raise IOError("seek() returned invalid position")
        return pos

    def tell(self):
        if self._write_buf:
            return BufferedWriter.tell(self)
        else:
            return BufferedReader.tell(self)

    def truncate(self, pos=None):
        if pos is None:
            pos = self.tell()
        # Use seek to flush the read buffer.
        return BufferedWriter.truncate(self, pos)

    def read(self, n=None):
        if n is None:
            n = -1
        self.flush()
        return BufferedReader.read(self, n)

    def readinto(self, b):
        self.flush()
        return BufferedReader.readinto(self, b)

    def peek(self, n=0):
        self.flush()
        return BufferedReader.peek(self, n)

    def read1(self, n):
        self.flush()
        return BufferedReader.read1(self, n)

    def write(self, b):
        if self._read_buf:
            # Undo readahead
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
                self._reset_read_buf()
        return BufferedWriter.write(self, b)


class TextIOBase(IOBase):

    """Base class for text I/O.

    This class provides a character and line based interface to stream
    I/O. There is no readinto method because Python's character strings
    are immutable. There is no public constructor.
    """

    def read(self, n=-1):
        """Read at most n characters from stream.

        Read from underlying buffer until we have n characters or we hit EOF.
        If n is negative or omitted, read until EOF.
        """
        self._unsupported("read")

    def write(self, s):
        """Write string s to stream."""
        self._unsupported("write")

    def truncate(self, pos=None):
        """Truncate size to pos."""
        self._unsupported("truncate")

    def readline(self):
        """Read until newline or EOF.

        Returns an empty string if EOF is hit immediately.
        """
        self._unsupported("readline")

    def detach(self):
        """
        Separate the underlying buffer from the TextIOBase and return it.

        After the underlying buffer has been detached, the TextIO is in an
        unusable state.
        """
        self._unsupported("detach")

    @property
    def encoding(self):
        """Subclasses should override."""
        return None

    @property
    def newlines(self):
        """Line endings translated so far.

        Only line endings translated during reading are considered.

        Subclasses should override.
        """
        return None

    @property
    def errors(self):
        """Error setting of the decoder or encoder.

        Subclasses should override."""
        return None

io.TextIOBase.register(TextIOBase)


class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
    r"""Codec used when reading a file in universal newlines mode. It wraps
    another incremental decoder, translating \r\n and \r into \n. It also
    records the types of newlines encountered. When used with
    translate=False, it ensures that the newline sequence is returned in
    one piece.
    """
    def __init__(self, decoder, translate, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors=errors)
        self.translate = translate
        self.decoder = decoder
        self.seennl = 0
        self.pendingcr = False

    def decode(self, input, final=False):
        # decode input (with the eventual \r from a previous pass)
        if self.decoder is None:
            output = input
        else:
            output = self.decoder.decode(input, final=final)
        if self.pendingcr and (output or final):
            output = "\r" + output
            self.pendingcr = False

        # retain last \r even when not translating data:
        # then readline() is sure to get \r\n in one pass
        if output.endswith("\r") and not final:
            output = output[:-1]
            self.pendingcr = True

        # Record which newlines are read
        crlf = output.count('\r\n')
        cr = output.count('\r') - crlf
        lf = output.count('\n') - crlf
        self.seennl |= (lf and self._LF) | (cr and self._CR) \
                    | (crlf and self._CRLF)

        if self.translate:
            if crlf:
                output = output.replace("\r\n", "\n")
            if cr:
                output = output.replace("\r", "\n")

        return output

    def getstate(self):
        if self.decoder is None:
            buf = b""
            flag = 0
        else:
            buf, flag = self.decoder.getstate()
        flag <<= 1
        if self.pendingcr:
            flag |= 1
        return buf, flag

    def setstate(self, state):
        buf, flag = state
        self.pendingcr = bool(flag & 1)
        if self.decoder is not None:
            self.decoder.setstate((buf, flag >> 1))

    def reset(self):
        self.seennl = 0
        self.pendingcr = False
        if self.decoder is not None:
            self.decoder.reset()

    _LF = 1
    _CR = 2
    _CRLF = 4

    @property
    def newlines(self):
        return (None,
                "\n",
                "\r",
                ("\r", "\n"),
                "\r\n",
                ("\n", "\r\n"),
                ("\r", "\r\n"),
                ("\r", "\n", "\r\n")
               )[self.seennl]
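
# A short sketch of the decoder above in translate mode; passing decoder=None
# means the input is already text, which this class accepts:
#
#     dec = IncrementalNewlineDecoder(None, translate=True)
#     dec.decode("a\r\nb\r")          # -> 'a\nb'  (trailing '\r' is held back)
#     dec.decode("\nc", final=True)   # -> '\nc'   (joined into one '\r\n')
#     dec.newlines                    # -> '\r\n'  (only CRLF endings were seen)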


class TextIOWrapper(TextIOBase):

    r"""Character and line based layer over a BufferedIOBase object, buffer.

    encoding gives the name of the encoding that the stream will be
    decoded or encoded with. It defaults to locale.getpreferredencoding.

    errors determines the strictness of encoding and decoding (see the
    codecs.register) and defaults to "strict".

    newline can be None, '', '\n', '\r', or '\r\n'. It controls the
    handling of line endings. If it is None, universal newlines is
    enabled. With this enabled, on input, the line endings '\n', '\r',
    or '\r\n' are translated to '\n' before being returned to the
    caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any other of its
    legal values, that newline becomes the newline when the file is read
    and it is returned untranslated. On output, '\n' is converted to the
    newline.

    If line_buffering is True, a call to flush is implied when a call to
    write contains a newline character.
    """

    _CHUNK_SIZE = 2048

    def __init__(self, buffer, encoding=None, errors=None, newline=None,
                 line_buffering=False):
        if newline is not None and not isinstance(newline, basestring):
            raise TypeError("illegal newline type: %r" % (type(newline),))
        if newline not in (None, "", "\n", "\r", "\r\n"):
            raise ValueError("illegal newline value: %r" % (newline,))
        if encoding is None:
            try:
                import locale
            except ImportError:
                # Importing locale may fail if Python is being built
                encoding = "ascii"
            else:
                encoding = locale.getpreferredencoding()

        if not isinstance(encoding, basestring):
            raise ValueError("invalid encoding: %r" % encoding)

        if errors is None:
            errors = "strict"
        else:
            if not isinstance(errors, basestring):
                raise ValueError("invalid errors: %r" % errors)

        self._buffer = buffer
        self._line_buffering = line_buffering
        self._encoding = encoding
        self._errors = errors
        self._readuniversal = not newline
        self._readtranslate = newline is None
        self._readnl = newline
        self._writetranslate = newline != ''
        self._writenl = newline or os.linesep
        self._encoder = None
        self._decoder = None
        self._decoded_chars = ''  # buffer for text returned from decoder
        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
        self._snapshot = None  # info for reconstructing decoder state
        self._seekable = self._telling = self.buffer.seekable()

        if self._seekable and self.writable():
            position = self.buffer.tell()
            if position != 0:
                try:
                    self._get_encoder().setstate(0)
                except LookupError:
                    # Sometimes the encoder doesn't exist
                    pass

    # self._snapshot is either None, or a tuple (dec_flags, next_input)
    # where dec_flags is the second (integer) item of the decoder state
    # and next_input is the chunk of input bytes that comes next after the
    # snapshot point. We use this to reconstruct decoder states in tell().

    # Naming convention:
    #   - "bytes_..." for integer variables that count input bytes
    #   - "chars_..." for integer variables that count decoded characters

    def __repr__(self):
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.TextIOWrapper encoding='{0}'>".format(self.encoding)
        else:
            return "<_pyio.TextIOWrapper name={0!r} encoding='{1}'>".format(
                name, self.encoding)

    @property
    def encoding(self):
        return self._encoding

    @property
    def errors(self):
        return self._errors

    @property
    def line_buffering(self):
        return self._line_buffering

    @property
    def buffer(self):
        return self._buffer

    def seekable(self):
        return self._seekable

    def readable(self):
        return self.buffer.readable()

    def writable(self):
        return self.buffer.writable()

    def flush(self):
        self.buffer.flush()
        self._telling = self._seekable

    def close(self):
        if self.buffer is not None and not self.closed:
            self.flush()
            self.buffer.close()

    @property
    def closed(self):
        return self.buffer.closed

    @property
    def name(self):
        return self.buffer.name

    def fileno(self):
        return self.buffer.fileno()

    def isatty(self):
        return self.buffer.isatty()

    def write(self, s):
        if self.closed:
            raise ValueError("write to closed file")
        if not isinstance(s, unicode):
            raise TypeError("can't write %s to text stream" %
                            s.__class__.__name__)
        length = len(s)
        haslf = (self._writetranslate or self._line_buffering) and "\n" in s
        if haslf and self._writetranslate and self._writenl != "\n":
            s = s.replace("\n", self._writenl)
        encoder = self._encoder or self._get_encoder()
        # XXX What if we were just reading?
        b = encoder.encode(s)
        self.buffer.write(b)
        if self._line_buffering and (haslf or "\r" in s):
            self.flush()
        self._snapshot = None
        if self._decoder:
            self._decoder.reset()
        return length

    def _get_encoder(self):
        make_encoder = codecs.getincrementalencoder(self._encoding)
        self._encoder = make_encoder(self._errors)
        return self._encoder

    def _get_decoder(self):
        make_decoder = codecs.getincrementaldecoder(self._encoding)
        decoder = make_decoder(self._errors)
        if self._readuniversal:
            decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
        self._decoder = decoder
        return decoder

    # The following three methods implement an ADT for _decoded_chars.
    # Text returned from the decoder is buffered here until the client
    # requests it by calling our read() or readline() method.
    def _set_decoded_chars(self, chars):
        """Set the _decoded_chars buffer."""
        self._decoded_chars = chars
        self._decoded_chars_used = 0

    def _get_decoded_chars(self, n=None):
        """Advance into the _decoded_chars buffer."""
        offset = self._decoded_chars_used
        if n is None:
            chars = self._decoded_chars[offset:]
        else:
            chars = self._decoded_chars[offset:offset + n]
        self._decoded_chars_used += len(chars)
        return chars

    def _rewind_decoded_chars(self, n):
        """Rewind the _decoded_chars buffer."""
        if self._decoded_chars_used < n:
            raise AssertionError("rewind decoded_chars out of bounds")
        self._decoded_chars_used -= n

    def _read_chunk(self):
        """
        Read and decode the next chunk of data from the BufferedReader.
        """

        # The return value is True unless EOF was reached. The decoded
        # string is placed in self._decoded_chars (replacing its previous
        # value). The entire input chunk is sent to the decoder, though
        # some of it may remain buffered in the decoder, yet to be
        # converted.

        if self._decoder is None:
            raise ValueError("no decoder")

        if self._telling:
            # To prepare for tell(), we need to snapshot a point in the
            # file where the decoder's input buffer is empty.

            dec_buffer, dec_flags = self._decoder.getstate()
            # Given this, we know there was a valid snapshot point
            # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).

        # Read a chunk, decode it, and put the result in self._decoded_chars.
        input_chunk = self.buffer.read1(self._CHUNK_SIZE)
        eof = not input_chunk
        self._set_decoded_chars(self._decoder.decode(input_chunk, eof))

        if self._telling:
            # At the snapshot point, len(dec_buffer) bytes before the read,
            # the next input to be decoded is dec_buffer + input_chunk.
            self._snapshot = (dec_flags, dec_buffer + input_chunk)

        return not eof

    def _pack_cookie(self, position, dec_flags=0,
                           bytes_to_feed=0, need_eof=0, chars_to_skip=0):
        # The meaning of a tell() cookie is: seek to position, set the
        # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
        # into the decoder with need_eof as the EOF flag, then skip
        # chars_to_skip characters of the decoded result. For most simple
        # decoders, tell() will often just give a byte offset in the file.
        return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
               (chars_to_skip<<192) | bool(need_eof)<<256)

    def _unpack_cookie(self, bigint):
        rest, position = divmod(bigint, 1<<64)
        rest, dec_flags = divmod(rest, 1<<64)
        rest, bytes_to_feed = divmod(rest, 1<<64)
        need_eof, chars_to_skip = divmod(rest, 1<<64)
        return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
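
    # A small worked example of the packing above: _pack_cookie(10, 1, 3)
    # returns 10 | (1 << 64) | (3 << 128), and _unpack_cookie() recovers
    # (10, 1, 3, 0, 0) by peeling the 64-bit fields back off with
    # divmod(..., 1 << 64).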
1662
    def tell(self):
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if not self._telling:
            raise IOError("telling position disabled by next() call")
        self.flush()
        position = self.buffer.tell()
        decoder = self._decoder
        if decoder is None or self._snapshot is None:
            if self._decoded_chars:
                # This should never happen.
                raise AssertionError("pending decoded text")
            return position

        # Skip backward to the snapshot point (see _read_chunk).
        dec_flags, next_input = self._snapshot
        position -= len(next_input)

        # How many decoded characters have been used up since the snapshot?
        chars_to_skip = self._decoded_chars_used
        if chars_to_skip == 0:
            # We haven't moved from the snapshot point.
            return self._pack_cookie(position, dec_flags)

        # Starting from the snapshot position, we will walk the decoder
        # forward until it gives us enough decoded characters.
        saved_state = decoder.getstate()
        try:
            # Note our initial start point.
            decoder.setstate((b'', dec_flags))
            start_pos = position
            start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
            need_eof = 0

            # Feed the decoder one byte at a time.  As we go, note the
            # nearest "safe start point" before the current location
            # (a point where the decoder has nothing buffered, so seek()
            # can safely start from there and advance to this location).
            for next_byte in next_input:
                bytes_fed += 1
                chars_decoded += len(decoder.decode(next_byte))
                dec_buffer, dec_flags = decoder.getstate()
                if not dec_buffer and chars_decoded <= chars_to_skip:
                    # Decoder buffer is empty, so this is a safe start point.
                    start_pos += bytes_fed
                    chars_to_skip -= chars_decoded
                    start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
                if chars_decoded >= chars_to_skip:
                    break
            else:
                # We didn't get enough decoded data; signal EOF to get more.
                chars_decoded += len(decoder.decode(b'', final=True))
                need_eof = 1
                if chars_decoded < chars_to_skip:
                    raise IOError("can't reconstruct logical file position")

            # The returned cookie corresponds to the last safe start point.
            return self._pack_cookie(
                start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
        finally:
            decoder.setstate(saved_state)

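    # Illustrative usage (not part of the implementation; the file name is
    # hypothetical): the cookie returned by tell() is opaque and only
    # meaningful to seek() on the same text stream, but it round-trips even
    # when the underlying codec is stateful:
    #
    #   f = open("data.txt", "r", encoding="utf-8")
    #   pos = f.tell()
    #   first = f.readline()
    #   f.seek(pos)
    #   assert f.readline() == first
    #   f.close()
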
    def truncate(self, pos=None):
        self.flush()
        if pos is None:
            pos = self.tell()
        return self.buffer.truncate(pos)

    def detach(self):
        if self.buffer is None:
            raise ValueError("buffer is already detached")
        self.flush()
        buffer = self._buffer
        self._buffer = None
        return buffer

    def seek(self, cookie, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if whence == 1:  # seek relative to current position
            if cookie != 0:
                raise IOError("can't do nonzero cur-relative seeks")
            # Seeking to the current position should attempt to
            # sync the underlying buffer with the current position.
            whence = 0
            cookie = self.tell()
        if whence == 2:  # seek relative to end of file
            if cookie != 0:
                raise IOError("can't do nonzero end-relative seeks")
            self.flush()
            position = self.buffer.seek(0, 2)
            self._set_decoded_chars('')
            self._snapshot = None
            if self._decoder:
                self._decoder.reset()
            return position
        if whence != 0:
            raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
                             (whence,))
        if cookie < 0:
            raise ValueError("negative seek position %r" % (cookie,))
        self.flush()

        # The strategy of seek() is to go back to the safe start point
        # and replay the effect of read(chars_to_skip) from there.
        start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
            self._unpack_cookie(cookie)

        # Seek back to the safe start point.
        self.buffer.seek(start_pos)
        self._set_decoded_chars('')
        self._snapshot = None

        # Restore the decoder to its state from the safe start point.
        if cookie == 0 and self._decoder:
            self._decoder.reset()
        elif self._decoder or dec_flags or chars_to_skip:
            self._decoder = self._decoder or self._get_decoder()
            self._decoder.setstate((b'', dec_flags))
            self._snapshot = (dec_flags, b'')

        if chars_to_skip:
            # Just like _read_chunk, feed the decoder and save a snapshot.
            input_chunk = self.buffer.read(bytes_to_feed)
            self._set_decoded_chars(
                self._decoder.decode(input_chunk, need_eof))
            self._snapshot = (dec_flags, input_chunk)

            # Skip chars_to_skip of the decoded characters.
            if len(self._decoded_chars) < chars_to_skip:
                raise IOError("can't restore logical file position")
            self._decoded_chars_used = chars_to_skip

        # Finally, reset the encoder (merely useful for proper BOM handling)
        try:
            encoder = self._encoder or self._get_encoder()
        except LookupError:
            # Sometimes the encoder doesn't exist
            pass
        else:
            if cookie != 0:
                encoder.setstate(0)
            else:
                encoder.reset()
        return cookie

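    # Summary sketch of the accepted call shapes (not part of the
    # implementation):
    #
    #   f.seek(cookie)     # cookie previously returned by f.tell()
    #   f.seek(0, 1)       # no-op seek that re-syncs with the buffer
    #   f.seek(0, 2)       # jump to the end of the file
    #
    # Any nonzero cur-relative or end-relative offset raises IOError.
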
    def read(self, n=None):
        self._checkReadable()
        if n is None:
            n = -1
        decoder = self._decoder or self._get_decoder()
        try:
            n.__index__
        except AttributeError:
            raise TypeError("an integer is required")
        if n < 0:
            # Read everything.
            result = (self._get_decoded_chars() +
                      decoder.decode(self.buffer.read(), final=True))
            self._set_decoded_chars('')
            self._snapshot = None
            return result
        else:
            # Keep reading chunks until we have n characters to return.
            eof = False
            result = self._get_decoded_chars(n)
            while len(result) < n and not eof:
                eof = not self._read_chunk()
                result += self._get_decoded_chars(n - len(result))
            return result

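    # Illustrative behaviour (not part of the implementation): read() with a
    # negative or omitted size decodes the remainder of the stream in one
    # call, while read(n) keeps calling _read_chunk() until n characters
    # (not bytes) are available or EOF is reached, e.g.:
    #
    #   t = TextIOWrapper(BytesIO(b"abcdef"), encoding="ascii")
    #   t.read(2)    # -> u'ab'
    #   t.read()     # -> u'cdef'
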
    def next(self):
        # Iteration disables tell(); it is re-enabled once EOF is reached.
        self._telling = False
        line = self.readline()
        if not line:
            self._snapshot = None
            self._telling = self._seekable
            raise StopIteration
        return line

    def readline(self, limit=None):
        if self.closed:
            raise ValueError("read from closed file")
        if limit is None:
            limit = -1
        elif not isinstance(limit, (int, long)):
            raise TypeError("limit must be an integer")

        # Grab all the decoded text (we will rewind any extra bits later).
        line = self._get_decoded_chars()

        start = 0
        # Make the decoder if it doesn't already exist.
        if not self._decoder:
            self._get_decoder()

        pos = endpos = None
        while True:
            if self._readtranslate:
                # Newlines are already translated, only search for \n
                pos = line.find('\n', start)
                if pos >= 0:
                    endpos = pos + 1
                    break
                else:
                    start = len(line)

            elif self._readuniversal:
                # Universal newline search. Find any of \r, \r\n, \n.
                # The decoder ensures that \r\n is never split in two pieces.

                # In C we'd look for these in parallel of course.
                nlpos = line.find("\n", start)
                crpos = line.find("\r", start)
                if crpos == -1:
                    if nlpos == -1:
                        # Nothing found
                        start = len(line)
                    else:
                        # Found \n
                        endpos = nlpos + 1
                        break
                elif nlpos == -1:
                    # Found lone \r
                    endpos = crpos + 1
                    break
                elif nlpos < crpos:
                    # Found \n
                    endpos = nlpos + 1
                    break
                elif nlpos == crpos + 1:
                    # Found \r\n
                    endpos = crpos + 2
                    break
                else:
                    # Found \r
                    endpos = crpos + 1
                    break
            else:
                # non-universal
                pos = line.find(self._readnl)
                if pos >= 0:
                    endpos = pos + len(self._readnl)
                    break

            if limit >= 0 and len(line) >= limit:
                endpos = limit  # reached length limit
                break

            # No line ending seen yet - get more data
            while self._read_chunk():
                if self._decoded_chars:
                    break
            if self._decoded_chars:
                line += self._get_decoded_chars()
            else:
                # end of file
                self._set_decoded_chars('')
                self._snapshot = None
                return line

        if limit >= 0 and endpos > limit:
            endpos = limit  # don't exceed limit

        # Rewind _decoded_chars to just after the line ending we found.
        self._rewind_decoded_chars(len(line) - endpos)
        return line[:endpos]

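    # Illustrative behaviour (not part of the implementation): with
    # newline='' the three conventions are recognized but not translated,
    # so readline() returns each terminator verbatim, e.g.:
    #
    #   t = TextIOWrapper(BytesIO(b"a\r\nb\rc"), encoding="ascii", newline='')
    #   t.readline()   # -> u'a\r\n'
    #   t.readline()   # -> u'b\r'
    #   t.readline()   # -> u'c'
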
    @property
    def newlines(self):
        return self._decoder.newlines if self._decoder else None


class StringIO(TextIOWrapper):
    """Text I/O implementation using an in-memory buffer.

    The initial_value argument sets the initial value of the object.  The
    newline argument behaves like the corresponding argument of
    TextIOWrapper's constructor.
    """

    def __init__(self, initial_value="", newline="\n"):
        super(StringIO, self).__init__(BytesIO(),
                                       encoding="utf-8",
                                       errors="strict",
                                       newline=newline)
        # Issue #5645: make universal newlines semantics the same as in the
        # C version, even under Windows.
        if newline is None:
            self._writetranslate = False
        if initial_value:
            if not isinstance(initial_value, unicode):
                initial_value = unicode(initial_value)
            self.write(initial_value)
            self.seek(0)

    def getvalue(self):
        self.flush()
        return self.buffer.getvalue().decode(self._encoding, self._errors)

    def __repr__(self):
        # TextIOWrapper reports the encoding in its repr.  In StringIO,
        # that's an implementation detail.
        return object.__repr__(self)

    @property
    def errors(self):
        return None

    @property
    def encoding(self):
        return None

    def detach(self):
        # This doesn't make sense on StringIO.
        self._unsupported("detach")
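
# Illustrative usage of StringIO (not part of the implementation):
#
#   sio = StringIO(u"hello\n")
#   sio.read()         # -> u'hello\n'
#   sio.seek(0)
#   sio.write(u"H")    # overwrites the first character in place
#   sio.getvalue()     # -> u'Hello\n'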