blob: 1cafcb59875ad6ac6d0be06b62ebc18081ba5940 [file] [log] [blame]
Guido van Rossum7d9ea502003-02-03 20:45:52 +00001import unittest
Benjamin Petersonee8712c2008-05-20 21:35:26 +00002from test import support
Christian Heimesd5e2b6f2008-03-19 21:50:51 +00003import binascii
Serhiy Storchakad7a44152015-11-12 11:23:04 +02004import pickle
Andrew M. Kuchling9a0f98e2001-02-21 02:17:01 +00005import random
Antoine Pitrouf3d22752011-02-21 18:09:00 +00006import sys
Antoine Pitrou94190bb2011-10-04 10:22:36 +02007from test.support import bigmemtest, _1G, _4G
Andrew M. Kuchling9a0f98e2001-02-21 02:17:01 +00008
R. David Murraya21e4ca2009-03-31 23:16:50 +00009zlib = support.import_module('zlib')
10
Serhiy Storchaka43767632013-11-03 21:31:38 +020011requires_Compress_copy = unittest.skipUnless(
12 hasattr(zlib.compressobj(), "copy"),
13 'requires Compress.copy()')
14requires_Decompress_copy = unittest.skipUnless(
15 hasattr(zlib.decompressobj(), "copy"),
16 'requires Decompress.copy()')
17
Andrew M. Kuchling9a0f98e2001-02-21 02:17:01 +000018
Nadeem Vawda64d25dd2011-09-12 00:04:13 +020019class VersionTestCase(unittest.TestCase):
20
21 def test_library_version(self):
Nadeem Vawda131c7072012-01-25 23:16:50 +020022 # Test that the major version of the actual library in use matches the
23 # major version that we were compiled against. We can't guarantee that
24 # the minor versions will match (even on the machine on which the module
25 # was compiled), and the API is stable between minor versions, so
Nadeem Vawdad770fe42012-01-28 17:32:47 +020026 # testing only the major versions avoids spurious failures.
Nadeem Vawda131c7072012-01-25 23:16:50 +020027 self.assertEqual(zlib.ZLIB_RUNTIME_VERSION[0], zlib.ZLIB_VERSION[0])
Nadeem Vawda64d25dd2011-09-12 00:04:13 +020028
29
Guido van Rossum7d9ea502003-02-03 20:45:52 +000030class ChecksumTestCase(unittest.TestCase):
31 # checksum test cases
32 def test_crc32start(self):
Guido van Rossum776152b2007-05-22 22:44:07 +000033 self.assertEqual(zlib.crc32(b""), zlib.crc32(b"", 0))
Benjamin Petersonc9c0f202009-06-30 23:06:06 +000034 self.assertTrue(zlib.crc32(b"abc", 0xffffffff))
Andrew M. Kuchlingfcfc8d52001-08-10 15:50:11 +000035
Guido van Rossum7d9ea502003-02-03 20:45:52 +000036 def test_crc32empty(self):
Guido van Rossum776152b2007-05-22 22:44:07 +000037 self.assertEqual(zlib.crc32(b"", 0), 0)
38 self.assertEqual(zlib.crc32(b"", 1), 1)
39 self.assertEqual(zlib.crc32(b"", 432), 432)
Andrew M. Kuchling9a0f98e2001-02-21 02:17:01 +000040
Guido van Rossum7d9ea502003-02-03 20:45:52 +000041 def test_adler32start(self):
Guido van Rossum776152b2007-05-22 22:44:07 +000042 self.assertEqual(zlib.adler32(b""), zlib.adler32(b"", 1))
Benjamin Petersonc9c0f202009-06-30 23:06:06 +000043 self.assertTrue(zlib.adler32(b"abc", 0xffffffff))
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +000044
Guido van Rossum7d9ea502003-02-03 20:45:52 +000045 def test_adler32empty(self):
Guido van Rossum776152b2007-05-22 22:44:07 +000046 self.assertEqual(zlib.adler32(b"", 0), 0)
47 self.assertEqual(zlib.adler32(b"", 1), 1)
48 self.assertEqual(zlib.adler32(b"", 432), 432)
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +000049
Guido van Rossum7d9ea502003-02-03 20:45:52 +000050 def test_penguins(self):
Martin Panterb82032f2015-12-11 05:19:29 +000051 self.assertEqual(zlib.crc32(b"penguin", 0), 0x0e5c1a120)
52 self.assertEqual(zlib.crc32(b"penguin", 1), 0x43b6aa94)
53 self.assertEqual(zlib.adler32(b"penguin", 0), 0x0bcf02f6)
54 self.assertEqual(zlib.adler32(b"penguin", 1), 0x0bd602f7)
Guido van Rossum7d9ea502003-02-03 20:45:52 +000055
Guido van Rossum776152b2007-05-22 22:44:07 +000056 self.assertEqual(zlib.crc32(b"penguin"), zlib.crc32(b"penguin", 0))
57 self.assertEqual(zlib.adler32(b"penguin"),zlib.adler32(b"penguin",1))
Guido van Rossum7d9ea502003-02-03 20:45:52 +000058
Gregory P. Smithab0d8a12008-03-17 20:24:09 +000059 def test_crc32_adler32_unsigned(self):
Antoine Pitrou77b338b2009-12-14 18:00:06 +000060 foo = b'abcdefghijklmnop'
Gregory P. Smithab0d8a12008-03-17 20:24:09 +000061 # explicitly test signed behavior
Gregory P. Smith27275032008-03-20 06:20:09 +000062 self.assertEqual(zlib.crc32(foo), 2486878355)
Antoine Pitrou77b338b2009-12-14 18:00:06 +000063 self.assertEqual(zlib.crc32(b'spam'), 1138425661)
Gregory P. Smithab0d8a12008-03-17 20:24:09 +000064 self.assertEqual(zlib.adler32(foo+foo), 3573550353)
Antoine Pitrou77b338b2009-12-14 18:00:06 +000065 self.assertEqual(zlib.adler32(b'spam'), 72286642)
Gregory P. Smithab0d8a12008-03-17 20:24:09 +000066
Christian Heimesd5e2b6f2008-03-19 21:50:51 +000067 def test_same_as_binascii_crc32(self):
Martin v. Löwis15b16a32008-12-02 06:00:15 +000068 foo = b'abcdefghijklmnop'
Gregory P. Smith27275032008-03-20 06:20:09 +000069 crc = 2486878355
Christian Heimesd5e2b6f2008-03-19 21:50:51 +000070 self.assertEqual(binascii.crc32(foo), crc)
71 self.assertEqual(zlib.crc32(foo), crc)
Martin v. Löwis15b16a32008-12-02 06:00:15 +000072 self.assertEqual(binascii.crc32(b'spam'), zlib.crc32(b'spam'))
Guido van Rossum7d9ea502003-02-03 20:45:52 +000073
74
Antoine Pitrouf3d22752011-02-21 18:09:00 +000075# Issue #10276 - check that inputs >=4GB are handled correctly.
76class ChecksumBigBufferTestCase(unittest.TestCase):
77
Nadeem Vawdabc8c8172012-02-23 14:16:15 +020078 @bigmemtest(size=_4G + 4, memuse=1, dry_run=False)
79 def test_big_buffer(self, size):
Nadeem Vawdab063a482012-02-23 13:36:25 +020080 data = b"nyan" * (_1G + 1)
81 self.assertEqual(zlib.crc32(data), 1044521549)
82 self.assertEqual(zlib.adler32(data), 2256789997)
Antoine Pitrouf3d22752011-02-21 18:09:00 +000083
Christian Heimesb186d002008-03-18 15:15:01 +000084
Guido van Rossum7d9ea502003-02-03 20:45:52 +000085class ExceptionTestCase(unittest.TestCase):
86 # make sure we generate some expected errors
Guido van Rossum8ce8a782007-11-01 19:42:39 +000087 def test_badlevel(self):
88 # specifying compression level out of range causes an error
89 # (but -1 is Z_DEFAULT_COMPRESSION and apparently the zlib
90 # accepts 0 too)
Antoine Pitrou77b338b2009-12-14 18:00:06 +000091 self.assertRaises(zlib.error, zlib.compress, b'ERROR', 10)
92
93 def test_badargs(self):
94 self.assertRaises(TypeError, zlib.adler32)
95 self.assertRaises(TypeError, zlib.crc32)
96 self.assertRaises(TypeError, zlib.compress)
97 self.assertRaises(TypeError, zlib.decompress)
98 for arg in (42, None, '', 'abc', (), []):
99 self.assertRaises(TypeError, zlib.adler32, arg)
100 self.assertRaises(TypeError, zlib.crc32, arg)
101 self.assertRaises(TypeError, zlib.compress, arg)
102 self.assertRaises(TypeError, zlib.decompress, arg)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000103
104 def test_badcompressobj(self):
105 # verify failure on building compress object with bad params
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000106 self.assertRaises(ValueError, zlib.compressobj, 1, zlib.DEFLATED, 0)
Guido van Rossum8ce8a782007-11-01 19:42:39 +0000107 # specifying total bits too large causes an error
108 self.assertRaises(ValueError,
109 zlib.compressobj, 1, zlib.DEFLATED, zlib.MAX_WBITS + 1)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000110
111 def test_baddecompressobj(self):
112 # verify failure on building decompress object with bad params
Antoine Pitrou90ee4df2010-04-06 17:23:13 +0000113 self.assertRaises(ValueError, zlib.decompressobj, -1)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000114
Christian Heimes5e696852008-04-09 08:37:03 +0000115 def test_decompressobj_badflush(self):
116 # verify failure on calling decompressobj.flush with bad params
117 self.assertRaises(ValueError, zlib.decompressobj().flush, 0)
118 self.assertRaises(ValueError, zlib.decompressobj().flush, -1)
119
Martin Pantere99e9772015-11-20 08:13:35 +0000120 @support.cpython_only
121 def test_overflow(self):
122 with self.assertRaisesRegex(OverflowError, 'int too large'):
123 zlib.decompress(b'', 15, sys.maxsize + 1)
124 with self.assertRaisesRegex(OverflowError, 'int too large'):
125 zlib.decompressobj().flush(sys.maxsize + 1)
126
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000127
Antoine Pitrou89562712010-05-07 17:04:02 +0000128class BaseCompressTestCase(object):
129 def check_big_compress_buffer(self, size, compress_func):
130 _1M = 1024 * 1024
Antoine Pitrou89562712010-05-07 17:04:02 +0000131 # Generate 10MB worth of random, and expand it by repeating it.
132 # The assumption is that zlib's memory is not big enough to exploit
133 # such spread out redundancy.
134 data = b''.join([random.getrandbits(8 * _1M).to_bytes(_1M, 'little')
135 for i in range(10)])
136 data = data * (size // len(data) + 1)
137 try:
138 compress_func(data)
139 finally:
140 # Release memory
141 data = None
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000142
Antoine Pitrou89562712010-05-07 17:04:02 +0000143 def check_big_decompress_buffer(self, size, decompress_func):
144 data = b'x' * size
145 try:
146 compressed = zlib.compress(data, 1)
147 finally:
148 # Release memory
149 data = None
150 data = decompress_func(compressed)
151 # Sanity check
152 try:
153 self.assertEqual(len(data), size)
154 self.assertEqual(len(data.strip(b'x')), 0)
155 finally:
156 data = None
157
158
159class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000160 # Test compression in one go (whole message compression)
161 def test_speech(self):
Neil Schemenauer6412b122004-06-05 19:34:28 +0000162 x = zlib.compress(HAMLET_SCENE)
163 self.assertEqual(zlib.decompress(x), HAMLET_SCENE)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000164
165 def test_speech128(self):
Neil Schemenauer6412b122004-06-05 19:34:28 +0000166 # compress more data
167 data = HAMLET_SCENE * 128
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000168 x = zlib.compress(data)
Antoine Pitrou77b338b2009-12-14 18:00:06 +0000169 self.assertEqual(zlib.compress(bytearray(data)), x)
170 for ob in x, bytearray(x):
171 self.assertEqual(zlib.decompress(ob), data)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000172
Antoine Pitrou53b21662010-05-11 23:46:02 +0000173 def test_incomplete_stream(self):
Martin Panter6245cb32016-04-15 02:14:19 +0000174 # A useful error message is given
Antoine Pitrou53b21662010-05-11 23:46:02 +0000175 x = zlib.compress(HAMLET_SCENE)
Ezio Melottied3a7d22010-12-01 02:32:32 +0000176 self.assertRaisesRegex(zlib.error,
Antoine Pitrou53b21662010-05-11 23:46:02 +0000177 "Error -5 while decompressing data: incomplete or truncated stream",
178 zlib.decompress, x[:-1])
179
Antoine Pitrou89562712010-05-07 17:04:02 +0000180 # Memory use of the following functions takes into account overallocation
181
Antoine Pitrou94190bb2011-10-04 10:22:36 +0200182 @bigmemtest(size=_1G + 1024 * 1024, memuse=3)
Antoine Pitrou89562712010-05-07 17:04:02 +0000183 def test_big_compress_buffer(self, size):
184 compress = lambda s: zlib.compress(s, 1)
185 self.check_big_compress_buffer(size, compress)
186
Antoine Pitrou94190bb2011-10-04 10:22:36 +0200187 @bigmemtest(size=_1G + 1024 * 1024, memuse=2)
Antoine Pitrou89562712010-05-07 17:04:02 +0000188 def test_big_decompress_buffer(self, size):
189 self.check_big_decompress_buffer(size, zlib.decompress)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000190
Nadeem Vawda197e22c2012-02-23 14:23:17 +0200191 @bigmemtest(size=_4G + 100, memuse=1, dry_run=False)
Victor Stinner8848c7a2011-01-04 02:07:36 +0000192 def test_length_overflow(self, size):
Victor Stinner8848c7a2011-01-04 02:07:36 +0000193 data = b'x' * size
194 try:
195 self.assertRaises(OverflowError, zlib.compress, data, 1)
Nadeem Vawda154bdf92011-05-14 23:07:36 +0200196 self.assertRaises(OverflowError, zlib.decompress, data)
Victor Stinner8848c7a2011-01-04 02:07:36 +0000197 finally:
198 data = None
199
Martin Pantere99e9772015-11-20 08:13:35 +0000200 @bigmemtest(size=_4G, memuse=1)
201 def test_large_bufsize(self, size):
202 # Test decompress(bufsize) parameter greater than the internal limit
203 data = HAMLET_SCENE * 10
204 compressed = zlib.compress(data, 1)
205 self.assertEqual(zlib.decompress(compressed, 15, size), data)
206
207 def test_custom_bufsize(self):
208 data = HAMLET_SCENE * 10
209 compressed = zlib.compress(data, 1)
210 self.assertEqual(zlib.decompress(compressed, 15, CustomInt()), data)
211
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000212
Antoine Pitrou89562712010-05-07 17:04:02 +0000213class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000214 # Test compression object
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000215 def test_pair(self):
Neil Schemenauer6412b122004-06-05 19:34:28 +0000216 # straightforward compress/decompress objects
Antoine Pitrou77b338b2009-12-14 18:00:06 +0000217 datasrc = HAMLET_SCENE * 128
218 datazip = zlib.compress(datasrc)
219 # should compress both bytes and bytearray data
220 for data in (datasrc, bytearray(datasrc)):
221 co = zlib.compressobj()
222 x1 = co.compress(data)
223 x2 = co.flush()
224 self.assertRaises(zlib.error, co.flush) # second flush should not work
225 self.assertEqual(x1 + x2, datazip)
226 for v1, v2 in ((x1, x2), (bytearray(x1), bytearray(x2))):
227 dco = zlib.decompressobj()
228 y1 = dco.decompress(v1 + v2)
229 y2 = dco.flush()
230 self.assertEqual(data, y1 + y2)
231 self.assertIsInstance(dco.unconsumed_tail, bytes)
232 self.assertIsInstance(dco.unused_data, bytes)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000233
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000234 def test_compressoptions(self):
235 # specify lots of options to compressobj()
236 level = 2
237 method = zlib.DEFLATED
238 wbits = -12
Martin Panterbf19d162015-09-09 01:01:13 +0000239 memLevel = 9
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000240 strategy = zlib.Z_FILTERED
Martin Panterbf19d162015-09-09 01:01:13 +0000241 co = zlib.compressobj(level, method, wbits, memLevel, strategy)
Neil Schemenauer6412b122004-06-05 19:34:28 +0000242 x1 = co.compress(HAMLET_SCENE)
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000243 x2 = co.flush()
244 dco = zlib.decompressobj(wbits)
245 y1 = dco.decompress(x1 + x2)
246 y2 = dco.flush()
Neil Schemenauer6412b122004-06-05 19:34:28 +0000247 self.assertEqual(HAMLET_SCENE, y1 + y2)
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000248
Martin Panterbf19d162015-09-09 01:01:13 +0000249 # keyword arguments should also be supported
250 zlib.compressobj(level=level, method=method, wbits=wbits,
251 memLevel=memLevel, strategy=strategy, zdict=b"")
252
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000253 def test_compressincremental(self):
254 # compress object in steps, decompress object as one-shot
Neil Schemenauer6412b122004-06-05 19:34:28 +0000255 data = HAMLET_SCENE * 128
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000256 co = zlib.compressobj()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000257 bufs = []
258 for i in range(0, len(data), 256):
259 bufs.append(co.compress(data[i:i+256]))
260 bufs.append(co.flush())
Guido van Rossum776152b2007-05-22 22:44:07 +0000261 combuf = b''.join(bufs)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000262
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000263 dco = zlib.decompressobj()
Guido van Rossum776152b2007-05-22 22:44:07 +0000264 y1 = dco.decompress(b''.join(bufs))
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000265 y2 = dco.flush()
266 self.assertEqual(data, y1 + y2)
267
Neil Schemenauer6412b122004-06-05 19:34:28 +0000268 def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000269 # compress object in steps, decompress object in steps
Neil Schemenauer6412b122004-06-05 19:34:28 +0000270 source = source or HAMLET_SCENE
271 data = source * 128
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000272 co = zlib.compressobj()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000273 bufs = []
Neil Schemenauer6412b122004-06-05 19:34:28 +0000274 for i in range(0, len(data), cx):
275 bufs.append(co.compress(data[i:i+cx]))
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000276 bufs.append(co.flush())
Guido van Rossum776152b2007-05-22 22:44:07 +0000277 combuf = b''.join(bufs)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000278
Gregory P. Smith693fc462008-09-06 20:13:06 +0000279 decombuf = zlib.decompress(combuf)
280 # Test type of return value
Ezio Melottie9615932010-01-24 19:26:24 +0000281 self.assertIsInstance(decombuf, bytes)
Gregory P. Smith693fc462008-09-06 20:13:06 +0000282
283 self.assertEqual(data, decombuf)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000284
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000285 dco = zlib.decompressobj()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000286 bufs = []
Neil Schemenauer6412b122004-06-05 19:34:28 +0000287 for i in range(0, len(combuf), dcx):
288 bufs.append(dco.decompress(combuf[i:i+dcx]))
Guido van Rossum776152b2007-05-22 22:44:07 +0000289 self.assertEqual(b'', dco.unconsumed_tail, ########
290 "(A) uct should be b'': not %d long" %
Neil Schemenauer6412b122004-06-05 19:34:28 +0000291 len(dco.unconsumed_tail))
Amaury Forgeot d'Arce43d33a2008-07-02 20:50:16 +0000292 self.assertEqual(b'', dco.unused_data)
Neil Schemenauer6412b122004-06-05 19:34:28 +0000293 if flush:
294 bufs.append(dco.flush())
295 else:
296 while True:
Antoine Pitrou77b338b2009-12-14 18:00:06 +0000297 chunk = dco.decompress(b'')
Neil Schemenauer6412b122004-06-05 19:34:28 +0000298 if chunk:
299 bufs.append(chunk)
300 else:
301 break
Guido van Rossum776152b2007-05-22 22:44:07 +0000302 self.assertEqual(b'', dco.unconsumed_tail, ########
303 "(B) uct should be b'': not %d long" %
Neil Schemenauer6412b122004-06-05 19:34:28 +0000304 len(dco.unconsumed_tail))
Amaury Forgeot d'Arce43d33a2008-07-02 20:50:16 +0000305 self.assertEqual(b'', dco.unused_data)
Guido van Rossum776152b2007-05-22 22:44:07 +0000306 self.assertEqual(data, b''.join(bufs))
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000307 # Failure means: "decompressobj with init options failed"
308
Neil Schemenauer6412b122004-06-05 19:34:28 +0000309 def test_decompincflush(self):
310 self.test_decompinc(flush=True)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000311
Neil Schemenauer6412b122004-06-05 19:34:28 +0000312 def test_decompimax(self, source=None, cx=256, dcx=64):
313 # compress in steps, decompress in length-restricted steps
314 source = source or HAMLET_SCENE
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000315 # Check a decompression object with max_length specified
Neil Schemenauer6412b122004-06-05 19:34:28 +0000316 data = source * 128
317 co = zlib.compressobj()
318 bufs = []
319 for i in range(0, len(data), cx):
320 bufs.append(co.compress(data[i:i+cx]))
321 bufs.append(co.flush())
Guido van Rossum776152b2007-05-22 22:44:07 +0000322 combuf = b''.join(bufs)
Neil Schemenauer6412b122004-06-05 19:34:28 +0000323 self.assertEqual(data, zlib.decompress(combuf),
324 'compressed data failure')
325
326 dco = zlib.decompressobj()
327 bufs = []
328 cb = combuf
329 while cb:
330 #max_length = 1 + len(cb)//10
331 chunk = dco.decompress(cb, dcx)
Benjamin Petersonc9c0f202009-06-30 23:06:06 +0000332 self.assertFalse(len(chunk) > dcx,
Neil Schemenauer6412b122004-06-05 19:34:28 +0000333 'chunk too big (%d>%d)' % (len(chunk), dcx))
334 bufs.append(chunk)
335 cb = dco.unconsumed_tail
336 bufs.append(dco.flush())
Guido van Rossum776152b2007-05-22 22:44:07 +0000337 self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')
Neil Schemenauer6412b122004-06-05 19:34:28 +0000338
339 def test_decompressmaxlen(self, flush=False):
340 # Check a decompression object with max_length specified
341 data = HAMLET_SCENE * 128
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000342 co = zlib.compressobj()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000343 bufs = []
344 for i in range(0, len(data), 256):
345 bufs.append(co.compress(data[i:i+256]))
346 bufs.append(co.flush())
Guido van Rossum776152b2007-05-22 22:44:07 +0000347 combuf = b''.join(bufs)
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000348 self.assertEqual(data, zlib.decompress(combuf),
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000349 'compressed data failure')
350
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000351 dco = zlib.decompressobj()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000352 bufs = []
353 cb = combuf
354 while cb:
Guido van Rossumf3594102003-02-27 18:39:18 +0000355 max_length = 1 + len(cb)//10
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000356 chunk = dco.decompress(cb, max_length)
Benjamin Petersonc9c0f202009-06-30 23:06:06 +0000357 self.assertFalse(len(chunk) > max_length,
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000358 'chunk too big (%d>%d)' % (len(chunk),max_length))
359 bufs.append(chunk)
360 cb = dco.unconsumed_tail
Neil Schemenauer6412b122004-06-05 19:34:28 +0000361 if flush:
362 bufs.append(dco.flush())
363 else:
364 while chunk:
Antoine Pitrou77b338b2009-12-14 18:00:06 +0000365 chunk = dco.decompress(b'', max_length)
Benjamin Petersonc9c0f202009-06-30 23:06:06 +0000366 self.assertFalse(len(chunk) > max_length,
Neil Schemenauer6412b122004-06-05 19:34:28 +0000367 'chunk too big (%d>%d)' % (len(chunk),max_length))
368 bufs.append(chunk)
Guido van Rossum776152b2007-05-22 22:44:07 +0000369 self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000370
Neil Schemenauer6412b122004-06-05 19:34:28 +0000371 def test_decompressmaxlenflush(self):
372 self.test_decompressmaxlen(flush=True)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000373
374 def test_maxlenmisc(self):
375 # Misc tests of max_length
Neil Schemenauer94afd3e2004-06-05 19:02:52 +0000376 dco = zlib.decompressobj()
Antoine Pitrou77b338b2009-12-14 18:00:06 +0000377 self.assertRaises(ValueError, dco.decompress, b"", -1)
Guido van Rossum776152b2007-05-22 22:44:07 +0000378 self.assertEqual(b'', dco.unconsumed_tail)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000379
Martin Pantere99e9772015-11-20 08:13:35 +0000380 def test_maxlen_large(self):
381 # Sizes up to sys.maxsize should be accepted, although zlib is
382 # internally limited to expressing sizes with unsigned int
383 data = HAMLET_SCENE * 10
384 self.assertGreater(len(data), zlib.DEF_BUF_SIZE)
385 compressed = zlib.compress(data, 1)
386 dco = zlib.decompressobj()
387 self.assertEqual(dco.decompress(compressed, sys.maxsize), data)
388
389 def test_maxlen_custom(self):
390 data = HAMLET_SCENE * 10
391 compressed = zlib.compress(data, 1)
392 dco = zlib.decompressobj()
393 self.assertEqual(dco.decompress(compressed, CustomInt()), data[:100])
394
Nadeem Vawda7619e882011-05-14 14:05:20 +0200395 def test_clear_unconsumed_tail(self):
396 # Issue #12050: calling decompress() without providing max_length
397 # should clear the unconsumed_tail attribute.
398 cdata = b"x\x9cKLJ\x06\x00\x02M\x01" # "abc"
399 dco = zlib.decompressobj()
400 ddata = dco.decompress(cdata, 1)
401 ddata += dco.decompress(dco.unconsumed_tail)
402 self.assertEqual(dco.unconsumed_tail, b"")
403
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000404 def test_flushes(self):
405 # Test flush() with the various options, using all the
406 # different levels in order to provide more variations.
407 sync_opt = ['Z_NO_FLUSH', 'Z_SYNC_FLUSH', 'Z_FULL_FLUSH']
408 sync_opt = [getattr(zlib, opt) for opt in sync_opt
409 if hasattr(zlib, opt)]
Neil Schemenauer6412b122004-06-05 19:34:28 +0000410 data = HAMLET_SCENE * 8
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000411
412 for sync in sync_opt:
413 for level in range(10):
414 obj = zlib.compressobj( level )
415 a = obj.compress( data[:3000] )
416 b = obj.flush( sync )
417 c = obj.compress( data[3000:] )
418 d = obj.flush()
Guido van Rossum776152b2007-05-22 22:44:07 +0000419 self.assertEqual(zlib.decompress(b''.join([a,b,c,d])),
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000420 data, ("Decompress failed: flush "
421 "mode=%i, level=%i") % (sync, level))
422 del obj
423
Serhiy Storchaka43767632013-11-03 21:31:38 +0200424 @unittest.skipUnless(hasattr(zlib, 'Z_SYNC_FLUSH'),
425 'requires zlib.Z_SYNC_FLUSH')
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000426 def test_odd_flush(self):
427 # Test for odd flushing bugs noted in 2.0, and hopefully fixed in 2.1
428 import random
Serhiy Storchaka43767632013-11-03 21:31:38 +0200429 # Testing on 17K of "random" data
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000430
Serhiy Storchaka43767632013-11-03 21:31:38 +0200431 # Create compressor and decompressor objects
432 co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
433 dco = zlib.decompressobj()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000434
Serhiy Storchaka43767632013-11-03 21:31:38 +0200435 # Try 17K of data
436 # generate random data stream
437 try:
438 # In 2.3 and later, WichmannHill is the RNG of the bug report
439 gen = random.WichmannHill()
440 except AttributeError:
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000441 try:
Serhiy Storchaka43767632013-11-03 21:31:38 +0200442 # 2.2 called it Random
443 gen = random.Random()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000444 except AttributeError:
Serhiy Storchaka43767632013-11-03 21:31:38 +0200445 # others might simply have a single RNG
446 gen = random
447 gen.seed(1)
448 data = genblock(1, 17 * 1024, generator=gen)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000449
Serhiy Storchaka43767632013-11-03 21:31:38 +0200450 # compress, sync-flush, and decompress
451 first = co.compress(data)
452 second = co.flush(zlib.Z_SYNC_FLUSH)
453 expanded = dco.decompress(first + second)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000454
Serhiy Storchaka43767632013-11-03 21:31:38 +0200455 # if decompressed data is different from the input data, choke.
456 self.assertEqual(expanded, data, "17K random source doesn't match")
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000457
Andrew M. Kuchling3b585b32004-12-28 20:10:48 +0000458 def test_empty_flush(self):
459 # Test that calling .flush() on unused objects works.
460 # (Bug #1083110 -- calling .flush() on decompress objects
461 # caused a core dump.)
462
463 co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
Benjamin Petersonc9c0f202009-06-30 23:06:06 +0000464 self.assertTrue(co.flush()) # Returns a zlib header
Andrew M. Kuchling3b585b32004-12-28 20:10:48 +0000465 dco = zlib.decompressobj()
Guido van Rossum776152b2007-05-22 22:44:07 +0000466 self.assertEqual(dco.flush(), b"") # Returns nothing
Tim Peters5a9fb3c2005-01-07 16:01:32 +0000467
Nadeem Vawdafd8a8382012-06-21 02:13:12 +0200468 def test_dictionary(self):
469 h = HAMLET_SCENE
Nadeem Vawdacf5e1d82012-06-22 00:35:57 +0200470 # Build a simulated dictionary out of the words in HAMLET.
Nadeem Vawdafd8a8382012-06-21 02:13:12 +0200471 words = h.split()
472 random.shuffle(words)
473 zdict = b''.join(words)
Nadeem Vawdacf5e1d82012-06-22 00:35:57 +0200474 # Use it to compress HAMLET.
Nadeem Vawdafd8a8382012-06-21 02:13:12 +0200475 co = zlib.compressobj(zdict=zdict)
476 cd = co.compress(h) + co.flush()
Nadeem Vawdacf5e1d82012-06-22 00:35:57 +0200477 # Verify that it will decompress with the dictionary.
Nadeem Vawdafd8a8382012-06-21 02:13:12 +0200478 dco = zlib.decompressobj(zdict=zdict)
479 self.assertEqual(dco.decompress(cd) + dco.flush(), h)
Nadeem Vawdacf5e1d82012-06-22 00:35:57 +0200480 # Verify that it fails when not given the dictionary.
Nadeem Vawdafd8a8382012-06-21 02:13:12 +0200481 dco = zlib.decompressobj()
482 self.assertRaises(zlib.error, dco.decompress, cd)
483
484 def test_dictionary_streaming(self):
Nadeem Vawdacf5e1d82012-06-22 00:35:57 +0200485 # This simulates the reuse of a compressor object for compressing
486 # several separate data streams.
Nadeem Vawdafd8a8382012-06-21 02:13:12 +0200487 co = zlib.compressobj(zdict=HAMLET_SCENE)
488 do = zlib.decompressobj(zdict=HAMLET_SCENE)
489 piece = HAMLET_SCENE[1000:1500]
490 d0 = co.compress(piece) + co.flush(zlib.Z_SYNC_FLUSH)
491 d1 = co.compress(piece[100:]) + co.flush(zlib.Z_SYNC_FLUSH)
492 d2 = co.compress(piece[:-100]) + co.flush(zlib.Z_SYNC_FLUSH)
493 self.assertEqual(do.decompress(d0), piece)
494 self.assertEqual(do.decompress(d1), piece[100:])
495 self.assertEqual(do.decompress(d2), piece[:-100])
496
Antoine Pitrouc09c92f2010-05-11 23:36:40 +0000497 def test_decompress_incomplete_stream(self):
498 # This is 'foo', deflated
499 x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'
500 # For the record
501 self.assertEqual(zlib.decompress(x), b'foo')
502 self.assertRaises(zlib.error, zlib.decompress, x[:-5])
503 # Omitting the stream end works with decompressor objects
504 # (see issue #8672).
505 dco = zlib.decompressobj()
506 y = dco.decompress(x[:-5])
507 y += dco.flush()
508 self.assertEqual(y, b'foo')
509
Nadeem Vawda1c385462011-08-13 15:22:40 +0200510 def test_decompress_eof(self):
511 x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo'
512 dco = zlib.decompressobj()
513 self.assertFalse(dco.eof)
514 dco.decompress(x[:-5])
515 self.assertFalse(dco.eof)
516 dco.decompress(x[-5:])
517 self.assertTrue(dco.eof)
518 dco.flush()
519 self.assertTrue(dco.eof)
520
521 def test_decompress_eof_incomplete_stream(self):
522 x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo'
523 dco = zlib.decompressobj()
524 self.assertFalse(dco.eof)
525 dco.decompress(x[:-5])
526 self.assertFalse(dco.eof)
527 dco.flush()
528 self.assertFalse(dco.eof)
529
Nadeem Vawda39079942012-11-05 00:37:42 +0100530 def test_decompress_unused_data(self):
531 # Repeated calls to decompress() after EOF should accumulate data in
532 # dco.unused_data, instead of just storing the arg to the last call.
Nadeem Vawdaee7889d2012-11-11 02:14:36 +0100533 source = b'abcdefghijklmnopqrstuvwxyz'
534 remainder = b'0123456789'
535 y = zlib.compress(source)
536 x = y + remainder
537 for maxlen in 0, 1000:
538 for step in 1, 2, len(y), len(x):
539 dco = zlib.decompressobj()
540 data = b''
541 for i in range(0, len(x), step):
542 if i < len(y):
543 self.assertEqual(dco.unused_data, b'')
544 if maxlen == 0:
545 data += dco.decompress(x[i : i + step])
546 self.assertEqual(dco.unconsumed_tail, b'')
547 else:
548 data += dco.decompress(
549 dco.unconsumed_tail + x[i : i + step], maxlen)
550 data += dco.flush()
Nadeem Vawdadd1253a2012-11-11 02:21:22 +0100551 self.assertTrue(dco.eof)
Nadeem Vawdaee7889d2012-11-11 02:14:36 +0100552 self.assertEqual(data, source)
553 self.assertEqual(dco.unconsumed_tail, b'')
554 self.assertEqual(dco.unused_data, remainder)
Nadeem Vawda39079942012-11-05 00:37:42 +0100555
Nadeem Vawda7ee95552012-11-11 03:15:32 +0100556 def test_flush_with_freed_input(self):
557 # Issue #16411: decompressor accesses input to last decompress() call
558 # in flush(), even if this object has been freed in the meanwhile.
559 input1 = b'abcdefghijklmnopqrstuvwxyz'
560 input2 = b'QWERTYUIOPASDFGHJKLZXCVBNM'
561 data = zlib.compress(input1)
562 dco = zlib.decompressobj()
563 dco.decompress(data, 1)
564 del data
565 data = zlib.compress(input2)
566 self.assertEqual(dco.flush(), input1[1:])
567
Martin Pantere99e9772015-11-20 08:13:35 +0000568 @bigmemtest(size=_4G, memuse=1)
569 def test_flush_large_length(self, size):
570 # Test flush(length) parameter greater than internal limit UINT_MAX
571 input = HAMLET_SCENE * 10
572 data = zlib.compress(input, 1)
573 dco = zlib.decompressobj()
574 dco.decompress(data, 1)
575 self.assertEqual(dco.flush(size), input[1:])
576
577 def test_flush_custom_length(self):
578 input = HAMLET_SCENE * 10
579 data = zlib.compress(input, 1)
580 dco = zlib.decompressobj()
581 dco.decompress(data, 1)
582 self.assertEqual(dco.flush(CustomInt()), input[1:])
583
Serhiy Storchaka43767632013-11-03 21:31:38 +0200584 @requires_Compress_copy
585 def test_compresscopy(self):
586 # Test copying a compression object
587 data0 = HAMLET_SCENE
588 data1 = bytes(str(HAMLET_SCENE, "ascii").swapcase(), "ascii")
589 c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
590 bufs0 = []
591 bufs0.append(c0.compress(data0))
Thomas Wouters477c8d52006-05-27 19:21:47 +0000592
Serhiy Storchaka43767632013-11-03 21:31:38 +0200593 c1 = c0.copy()
594 bufs1 = bufs0[:]
Thomas Wouters477c8d52006-05-27 19:21:47 +0000595
Serhiy Storchaka43767632013-11-03 21:31:38 +0200596 bufs0.append(c0.compress(data0))
597 bufs0.append(c0.flush())
598 s0 = b''.join(bufs0)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000599
Serhiy Storchaka43767632013-11-03 21:31:38 +0200600 bufs1.append(c1.compress(data1))
601 bufs1.append(c1.flush())
602 s1 = b''.join(bufs1)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000603
Serhiy Storchaka43767632013-11-03 21:31:38 +0200604 self.assertEqual(zlib.decompress(s0),data0+data0)
605 self.assertEqual(zlib.decompress(s1),data0+data1)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000606
Serhiy Storchaka43767632013-11-03 21:31:38 +0200607 @requires_Compress_copy
608 def test_badcompresscopy(self):
609 # Test copying a compression object in an inconsistent state
610 c = zlib.compressobj()
611 c.compress(HAMLET_SCENE)
612 c.flush()
613 self.assertRaises(ValueError, c.copy)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000614
Serhiy Storchaka43767632013-11-03 21:31:38 +0200615 @requires_Decompress_copy
616 def test_decompresscopy(self):
617 # Test copying a decompression object
618 data = HAMLET_SCENE
619 comp = zlib.compress(data)
620 # Test type of return value
621 self.assertIsInstance(comp, bytes)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000622
Serhiy Storchaka43767632013-11-03 21:31:38 +0200623 d0 = zlib.decompressobj()
624 bufs0 = []
625 bufs0.append(d0.decompress(comp[:32]))
Thomas Wouters477c8d52006-05-27 19:21:47 +0000626
Serhiy Storchaka43767632013-11-03 21:31:38 +0200627 d1 = d0.copy()
628 bufs1 = bufs0[:]
Thomas Wouters477c8d52006-05-27 19:21:47 +0000629
Serhiy Storchaka43767632013-11-03 21:31:38 +0200630 bufs0.append(d0.decompress(comp[32:]))
631 s0 = b''.join(bufs0)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000632
Serhiy Storchaka43767632013-11-03 21:31:38 +0200633 bufs1.append(d1.decompress(comp[32:]))
634 s1 = b''.join(bufs1)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000635
Serhiy Storchaka43767632013-11-03 21:31:38 +0200636 self.assertEqual(s0,s1)
637 self.assertEqual(s0,data)
Thomas Wouters477c8d52006-05-27 19:21:47 +0000638
Serhiy Storchaka43767632013-11-03 21:31:38 +0200639 @requires_Decompress_copy
640 def test_baddecompresscopy(self):
641 # Test copying a compression object in an inconsistent state
642 data = zlib.compress(HAMLET_SCENE)
643 d = zlib.decompressobj()
644 d.decompress(data)
645 d.flush()
646 self.assertRaises(ValueError, d.copy)
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000647
Serhiy Storchakad7a44152015-11-12 11:23:04 +0200648 def test_compresspickle(self):
649 for proto in range(pickle.HIGHEST_PROTOCOL + 1):
650 with self.assertRaises((TypeError, pickle.PicklingError)):
651 pickle.dumps(zlib.compressobj(zlib.Z_BEST_COMPRESSION), proto)
652
653 def test_decompresspickle(self):
654 for proto in range(pickle.HIGHEST_PROTOCOL + 1):
655 with self.assertRaises((TypeError, pickle.PicklingError)):
656 pickle.dumps(zlib.decompressobj(), proto)
657
Antoine Pitrou89562712010-05-07 17:04:02 +0000658 # Memory use of the following functions takes into account overallocation
659
Antoine Pitrou94190bb2011-10-04 10:22:36 +0200660 @bigmemtest(size=_1G + 1024 * 1024, memuse=3)
Antoine Pitrou89562712010-05-07 17:04:02 +0000661 def test_big_compress_buffer(self, size):
662 c = zlib.compressobj(1)
663 compress = lambda s: c.compress(s) + c.flush()
664 self.check_big_compress_buffer(size, compress)
665
Antoine Pitrou94190bb2011-10-04 10:22:36 +0200666 @bigmemtest(size=_1G + 1024 * 1024, memuse=2)
Antoine Pitrou89562712010-05-07 17:04:02 +0000667 def test_big_decompress_buffer(self, size):
668 d = zlib.decompressobj()
669 decompress = lambda s: d.decompress(s) + d.flush()
670 self.check_big_decompress_buffer(size, decompress)
671
Nadeem Vawda197e22c2012-02-23 14:23:17 +0200672 @bigmemtest(size=_4G + 100, memuse=1, dry_run=False)
Nadeem Vawda0c3d96a2011-05-15 00:19:50 +0200673 def test_length_overflow(self, size):
Nadeem Vawda0c3d96a2011-05-15 00:19:50 +0200674 data = b'x' * size
Nadeem Vawda1161a9c2011-05-15 00:48:24 +0200675 c = zlib.compressobj(1)
676 d = zlib.decompressobj()
Nadeem Vawda0c3d96a2011-05-15 00:19:50 +0200677 try:
Nadeem Vawda1161a9c2011-05-15 00:48:24 +0200678 self.assertRaises(OverflowError, c.compress, data)
679 self.assertRaises(OverflowError, d.decompress, data)
Nadeem Vawda0c3d96a2011-05-15 00:19:50 +0200680 finally:
681 data = None
682
Antoine Pitrou89562712010-05-07 17:04:02 +0000683
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000684def genblock(seed, length, step=1024, generator=random):
685 """length-byte stream of random data from a seed (in step-byte blocks)."""
686 if seed is not None:
687 generator.seed(seed)
688 randint = generator.randint
689 if length < step or step < 2:
690 step = length
Guido van Rossum776152b2007-05-22 22:44:07 +0000691 blocks = bytes()
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000692 for i in range(0, length, step):
Guido van Rossum776152b2007-05-22 22:44:07 +0000693 blocks += bytes(randint(0, 255) for x in range(step))
694 return blocks
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000695
696
697
698def choose_lines(source, number, seed=None, generator=random):
699 """Return a list of number lines randomly chosen from the source"""
700 if seed is not None:
701 generator.seed(seed)
702 sources = source.split('\n')
703 return [generator.choice(sources) for n in range(number)]
704
705
706
Guido van Rossum776152b2007-05-22 22:44:07 +0000707HAMLET_SCENE = b"""
Fred Drake004d5e62000-10-23 17:22:08 +0000708LAERTES
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +0000709
710 O, fear me not.
711 I stay too long: but here my father comes.
712
713 Enter POLONIUS
714
715 A double blessing is a double grace,
716 Occasion smiles upon a second leave.
717
Fred Drake004d5e62000-10-23 17:22:08 +0000718LORD POLONIUS
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +0000719
720 Yet here, Laertes! aboard, aboard, for shame!
721 The wind sits in the shoulder of your sail,
722 And you are stay'd for. There; my blessing with thee!
723 And these few precepts in thy memory
724 See thou character. Give thy thoughts no tongue,
725 Nor any unproportioned thought his act.
726 Be thou familiar, but by no means vulgar.
727 Those friends thou hast, and their adoption tried,
728 Grapple them to thy soul with hoops of steel;
729 But do not dull thy palm with entertainment
730 Of each new-hatch'd, unfledged comrade. Beware
731 Of entrance to a quarrel, but being in,
732 Bear't that the opposed may beware of thee.
733 Give every man thy ear, but few thy voice;
734 Take each man's censure, but reserve thy judgment.
735 Costly thy habit as thy purse can buy,
736 But not express'd in fancy; rich, not gaudy;
737 For the apparel oft proclaims the man,
738 And they in France of the best rank and station
739 Are of a most select and generous chief in that.
740 Neither a borrower nor a lender be;
741 For loan oft loses both itself and friend,
742 And borrowing dulls the edge of husbandry.
743 This above all: to thine ownself be true,
744 And it must follow, as the night the day,
745 Thou canst not then be false to any man.
746 Farewell: my blessing season this in thee!
747
Fred Drake004d5e62000-10-23 17:22:08 +0000748LAERTES
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +0000749
750 Most humbly do I take my leave, my lord.
751
Fred Drake004d5e62000-10-23 17:22:08 +0000752LORD POLONIUS
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +0000753
754 The time invites you; go; your servants tend.
755
Fred Drake004d5e62000-10-23 17:22:08 +0000756LAERTES
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +0000757
758 Farewell, Ophelia; and remember well
759 What I have said to you.
760
Fred Drake004d5e62000-10-23 17:22:08 +0000761OPHELIA
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +0000762
763 'Tis in my memory lock'd,
764 And you yourself shall keep the key of it.
765
Fred Drake004d5e62000-10-23 17:22:08 +0000766LAERTES
Jeremy Hylton6eb4b6a1997-08-15 15:59:43 +0000767
768 Farewell.
769"""
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000770
771
Martin Pantere99e9772015-11-20 08:13:35 +0000772class CustomInt:
773 def __int__(self):
774 return 100
775
776
Guido van Rossum7d9ea502003-02-03 20:45:52 +0000777if __name__ == "__main__":
Zachary Ware38c707e2015-04-13 15:00:43 -0500778 unittest.main()