import unittest
from test.test_support import TESTFN, run_unittest, import_module, unlink, requires
import binascii
import pickle
import random
from test.test_support import precisionbigmemtest, _1G, _4G
import sys

try:
    import mmap
except ImportError:
    mmap = None

zlib = import_module('zlib')

requires_Compress_copy = unittest.skipUnless(
        hasattr(zlib.compressobj(), "copy"),
        'requires Compress.copy()')
requires_Decompress_copy = unittest.skipUnless(
        hasattr(zlib.decompressobj(), "copy"),
        'requires Decompress.copy()')


class ChecksumTestCase(unittest.TestCase):
    # checksum test cases
    def test_crc32start(self):
        self.assertEqual(zlib.crc32(""), zlib.crc32("", 0))
        self.assertTrue(zlib.crc32("abc", 0xffffffff))

    def test_crc32empty(self):
        self.assertEqual(zlib.crc32("", 0), 0)
        self.assertEqual(zlib.crc32("", 1), 1)
        self.assertEqual(zlib.crc32("", 432), 432)

    def test_adler32start(self):
        self.assertEqual(zlib.adler32(""), zlib.adler32("", 1))
        self.assertTrue(zlib.adler32("abc", 0xffffffff))

    def test_adler32empty(self):
        self.assertEqual(zlib.adler32("", 0), 0)
        self.assertEqual(zlib.adler32("", 1), 1)
        self.assertEqual(zlib.adler32("", 432), 432)

    def assertEqual32(self, seen, expected):
        # 32-bit values masked -- checksums on 32- vs 64- bit machines
        # This is important if bit 31 (0x80000000L) is set.
        self.assertEqual(seen & 0x0FFFFFFFFL, expected & 0x0FFFFFFFFL)

    def test_penguins(self):
        self.assertEqual32(zlib.crc32("penguin", 0), 0x0e5c1a120L)
        self.assertEqual32(zlib.crc32("penguin", 1), 0x43b6aa94)
        self.assertEqual32(zlib.adler32("penguin", 0), 0x0bcf02f6)
        self.assertEqual32(zlib.adler32("penguin", 1), 0x0bd602f7)

        self.assertEqual(zlib.crc32("penguin"), zlib.crc32("penguin", 0))
        self.assertEqual(zlib.adler32("penguin"), zlib.adler32("penguin", 1))

    def test_abcdefghijklmnop(self):
        """test issue1202 compliance: signed crc32, adler32 in 2.x"""
        foo = 'abcdefghijklmnop'
        # explicitly test signed behavior
        self.assertEqual(zlib.crc32(foo), -1808088941)
        self.assertEqual(zlib.crc32('spam'), 1138425661)
        self.assertEqual(zlib.adler32(foo+foo), -721416943)
        self.assertEqual(zlib.adler32('spam'), 72286642)

    def test_same_as_binascii_crc32(self):
        foo = 'abcdefghijklmnop'
        self.assertEqual(binascii.crc32(foo), zlib.crc32(foo))
        self.assertEqual(binascii.crc32('spam'), zlib.crc32('spam'))

    def test_negative_crc_iv_input(self):
        # The range of valid input values for the crc state should be
        # -2**31 through 2**32-1 to allow inputs artificially constrained
        # to a signed 32-bit integer.
        self.assertEqual(zlib.crc32('ham', -1), zlib.crc32('ham', 0xffffffffL))
        self.assertEqual(zlib.crc32('spam', -3141593),
                         zlib.crc32('spam', 0xffd01027L))
        self.assertEqual(zlib.crc32('spam', -(2**31)),
                         zlib.crc32('spam', (2**31)))
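
    # Illustrative addition (not from the original suite): a minimal sketch
    # showing how the signed 2.x results above relate to the unsigned 32-bit
    # checksum values, by masking with 0xffffffff.  The constants reuse the
    # expected values from test_abcdefghijklmnop.
    def test_checksum_unsigned_mask(self):
        foo = 'abcdefghijklmnop'
        self.assertEqual(zlib.crc32(foo) & 0xffffffffL,
                         (-1808088941) & 0xffffffffL)
        self.assertEqual(zlib.adler32(foo+foo) & 0xffffffffL,
                         (-721416943) & 0xffffffffL)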


class ExceptionTestCase(unittest.TestCase):
    # make sure we generate some expected errors
    def test_badlevel(self):
        # specifying compression level out of range causes an error
        # (but -1 is Z_DEFAULT_COMPRESSION and apparently the zlib
        # accepts 0 too)
        self.assertRaises(zlib.error, zlib.compress, 'ERROR', 10)

    def test_badcompressobj(self):
        # verify failure on building compress object with bad params
        self.assertRaises(ValueError, zlib.compressobj, 1, zlib.DEFLATED, 0)
        # specifying total bits too large causes an error
        self.assertRaises(ValueError,
                zlib.compressobj, 1, zlib.DEFLATED, zlib.MAX_WBITS + 1)

    def test_baddecompressobj(self):
        # verify failure on building decompress object with bad params
        self.assertRaises(ValueError, zlib.decompressobj, -1)

    def test_decompressobj_badflush(self):
        # verify failure on calling decompressobj.flush with bad params
        self.assertRaises(ValueError, zlib.decompressobj().flush, 0)
        self.assertRaises(ValueError, zlib.decompressobj().flush, -1)


class BaseCompressTestCase(object):
    def check_big_compress_buffer(self, size, compress_func):
        _1M = 1024 * 1024
        fmt = "%%0%dx" % (2 * _1M)
        # Generate 10MB worth of random data, and expand it by repeating it.
        # The assumption is that zlib's memory is not big enough to exploit
        # such spread out redundancy.
        data = ''.join([binascii.a2b_hex(fmt % random.getrandbits(8 * _1M))
                        for i in range(10)])
        data = data * (size // len(data) + 1)
        try:
            compress_func(data)
        finally:
            # Release memory
            data = None

    def check_big_decompress_buffer(self, size, decompress_func):
        data = 'x' * size
        try:
            compressed = zlib.compress(data, 1)
        finally:
            # Release memory
            data = None
        data = decompress_func(compressed)
        # Sanity check
        try:
            self.assertEqual(len(data), size)
            self.assertEqual(len(data.strip('x')), 0)
        finally:
            data = None


class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
    # Test compression in one go (whole message compression)
    def test_speech(self):
        x = zlib.compress(HAMLET_SCENE)
        self.assertEqual(zlib.decompress(x), HAMLET_SCENE)

    def test_speech128(self):
        # compress more data
        data = HAMLET_SCENE * 128
        x = zlib.compress(data)
        self.assertEqual(zlib.decompress(x), data)

    def test_incomplete_stream(self):
        # A useful error message is given
        x = zlib.compress(HAMLET_SCENE)
        self.assertRaisesRegexp(zlib.error,
            "Error -5 while decompressing data: incomplete or truncated stream",
            zlib.decompress, x[:-1])

    # Memory use of the following functions takes into account overallocation

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_big_compress_buffer(self, size):
        compress = lambda s: zlib.compress(s, 1)
        self.check_big_compress_buffer(size, compress)

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
    def test_big_decompress_buffer(self, size):
        self.check_big_decompress_buffer(size, zlib.decompress)


class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
    # Test compression object
    def test_pair(self):
        # straightforward compress/decompress objects
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        x1 = co.compress(data)
        x2 = co.flush()
        self.assertRaises(zlib.error, co.flush) # second flush should not work
        dco = zlib.decompressobj()
        y1 = dco.decompress(x1 + x2)
        y2 = dco.flush()
        self.assertEqual(data, y1 + y2)

    def test_compressoptions(self):
        # specify lots of options to compressobj()
        level = 2
        method = zlib.DEFLATED
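        # Note (added): a negative wbits value asks the underlying zlib for a
        # raw deflate stream with no zlib header or trailing checksum, which
        # is why decompressobj() below is created with the same negative wbits.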
        wbits = -12
        memlevel = 9
        strategy = zlib.Z_FILTERED
        co = zlib.compressobj(level, method, wbits, memlevel, strategy)
        x1 = co.compress(HAMLET_SCENE)
        x2 = co.flush()
        dco = zlib.decompressobj(wbits)
        y1 = dco.decompress(x1 + x2)
        y2 = dco.flush()
        self.assertEqual(HAMLET_SCENE, y1 + y2)

    def test_compressincremental(self):
        # compress object in steps, decompress object as one-shot
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)

        dco = zlib.decompressobj()
        y1 = dco.decompress(''.join(bufs))
        y2 = dco.flush()
        self.assertEqual(data, y1 + y2)

    def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):
        # compress object in steps, decompress object in steps
        source = source or HAMLET_SCENE
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)

        self.assertEqual(data, zlib.decompress(combuf))

        dco = zlib.decompressobj()
        bufs = []
        for i in range(0, len(combuf), dcx):
            bufs.append(dco.decompress(combuf[i:i+dcx]))
            self.assertEqual('', dco.unconsumed_tail, ########
                             "(A) uct should be '': not %d long" %
                             len(dco.unconsumed_tail))
        if flush:
            bufs.append(dco.flush())
        else:
            while True:
                chunk = dco.decompress('')
                if chunk:
                    bufs.append(chunk)
                else:
                    break
        self.assertEqual('', dco.unconsumed_tail, ########
                         "(B) uct should be '': not %d long" %
                         len(dco.unconsumed_tail))
        self.assertEqual(data, ''.join(bufs))
        # Failure means: "decompressobj with init options failed"

    def test_decompincflush(self):
        self.test_decompinc(flush=True)

    def test_decompimax(self, source=None, cx=256, dcx=64):
        # compress in steps, decompress in length-restricted steps
        source = source or HAMLET_SCENE
        # Check a decompression object with max_length specified
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            #max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, dcx)
            self.assertFalse(len(chunk) > dcx,
                    'chunk too big (%d>%d)' % (len(chunk), dcx))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        bufs.append(dco.flush())
        self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlen(self, flush=False):
        # Check a decompression object with max_length specified
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, max_length)
            self.assertFalse(len(chunk) > max_length,
                        'chunk too big (%d>%d)' % (len(chunk), max_length))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        if flush:
            bufs.append(dco.flush())
        else:
            while chunk:
                chunk = dco.decompress('', max_length)
                self.assertFalse(len(chunk) > max_length,
                            'chunk too big (%d>%d)' % (len(chunk), max_length))
                bufs.append(chunk)
        self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlenflush(self):
        self.test_decompressmaxlen(flush=True)

    def test_maxlenmisc(self):
        # Misc tests of max_length
        dco = zlib.decompressobj()
        self.assertRaises(ValueError, dco.decompress, "", -1)
        self.assertEqual('', dco.unconsumed_tail)
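
    # Illustrative addition (not from the original suite): a minimal sketch of
    # the documented max_length/unconsumed_tail protocol -- when max_length
    # caps the output, the unread input is kept in unconsumed_tail and has to
    # be fed back into the next decompress() call.
    def test_unconsumed_tail_roundtrip(self):
        comp = zlib.compress(HAMLET_SCENE)
        dco = zlib.decompressobj()
        pieces = []
        tail = comp
        while tail:
            pieces.append(dco.decompress(tail, 64))
            tail = dco.unconsumed_tail
        pieces.append(dco.flush())
        self.assertEqual(''.join(pieces), HAMLET_SCENE)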

    def test_clear_unconsumed_tail(self):
        # Issue #12050: calling decompress() without providing max_length
        # should clear the unconsumed_tail attribute.
        cdata = "x\x9cKLJ\x06\x00\x02M\x01"    # "abc"
        dco = zlib.decompressobj()
        ddata = dco.decompress(cdata, 1)
        ddata += dco.decompress(dco.unconsumed_tail)
        self.assertEqual(dco.unconsumed_tail, "")

    def test_flushes(self):
        # Test flush() with the various options, using all the
        # different levels in order to provide more variations.
        sync_opt = ['Z_NO_FLUSH', 'Z_SYNC_FLUSH', 'Z_FULL_FLUSH']
        sync_opt = [getattr(zlib, opt) for opt in sync_opt
                    if hasattr(zlib, opt)]
        data = HAMLET_SCENE * 8

        for sync in sync_opt:
            for level in range(10):
                obj = zlib.compressobj(level)
                a = obj.compress(data[:3000])
                b = obj.flush(sync)
                c = obj.compress(data[3000:])
                d = obj.flush()
                self.assertEqual(zlib.decompress(''.join([a, b, c, d])),
                                 data, ("Decompress failed: flush "
                                        "mode=%i, level=%i") % (sync, level))
                del obj

    @unittest.skipUnless(hasattr(zlib, 'Z_SYNC_FLUSH'),
                         'requires zlib.Z_SYNC_FLUSH')
    def test_odd_flush(self):
        # Test for odd flushing bugs noted in 2.0, and hopefully fixed in 2.1
        import random
        # Testing on 17K of "random" data

        # Create compressor and decompressor objects
        co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
        dco = zlib.decompressobj()

        # Try 17K of data
        # generate random data stream
        try:
            # In 2.3 and later, WichmannHill is the RNG of the bug report
            gen = random.WichmannHill()
        except AttributeError:
            try:
                # 2.2 called it Random
                gen = random.Random()
            except AttributeError:
                # others might simply have a single RNG
                gen = random
        gen.seed(1)
        data = genblock(1, 17 * 1024, generator=gen)

        # compress, sync-flush, and decompress
        first = co.compress(data)
        second = co.flush(zlib.Z_SYNC_FLUSH)
        expanded = dco.decompress(first + second)

        # if decompressed data is different from the input data, choke.
        self.assertEqual(expanded, data, "17K random source doesn't match")

    def test_empty_flush(self):
        # Test that calling .flush() on unused objects works.
        # (Bug #1083110 -- calling .flush() on decompress objects
        # caused a core dump.)

        co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
        self.assertTrue(co.flush())  # Returns a zlib header
        dco = zlib.decompressobj()
        self.assertEqual(dco.flush(), "")  # Returns nothing

    def test_decompress_incomplete_stream(self):
        # This is 'foo', deflated
        x = 'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'
        # For the record
        self.assertEqual(zlib.decompress(x), 'foo')
        self.assertRaises(zlib.error, zlib.decompress, x[:-5])
        # Omitting the stream end works with decompressor objects
        # (see issue #8672).
        dco = zlib.decompressobj()
        y = dco.decompress(x[:-5])
        y += dco.flush()
        self.assertEqual(y, 'foo')

    def test_flush_with_freed_input(self):
        # Issue #16411: decompressor accesses input to last decompress() call
        # in flush(), even if this object has been freed in the meanwhile.
        input1 = 'abcdefghijklmnopqrstuvwxyz'
        input2 = 'QWERTYUIOPASDFGHJKLZXCVBNM'
        data = zlib.compress(input1)
        dco = zlib.decompressobj()
        dco.decompress(data, 1)
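        # Note (added): max_length=1 above yields only the first byte of
        # input1, so flush() must still produce the remaining bytes even after
        # the original input buffer has been freed and its memory reused.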
        del data
        data = zlib.compress(input2)
        self.assertEqual(dco.flush(), input1[1:])

    @requires_Compress_copy
    def test_compresscopy(self):
        # Test copying a compression object
        data0 = HAMLET_SCENE
        data1 = HAMLET_SCENE.swapcase()
        c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
        bufs0 = []
        bufs0.append(c0.compress(data0))

        c1 = c0.copy()
        bufs1 = bufs0[:]

        bufs0.append(c0.compress(data0))
        bufs0.append(c0.flush())
        s0 = ''.join(bufs0)

        bufs1.append(c1.compress(data1))
        bufs1.append(c1.flush())
        s1 = ''.join(bufs1)

        self.assertEqual(zlib.decompress(s0), data0 + data0)
        self.assertEqual(zlib.decompress(s1), data0 + data1)

    @requires_Compress_copy
    def test_badcompresscopy(self):
        # Test copying a compression object in an inconsistent state
        c = zlib.compressobj()
        c.compress(HAMLET_SCENE)
        c.flush()
        self.assertRaises(ValueError, c.copy)

    def test_decompress_unused_data(self):
        # Repeated calls to decompress() after EOF should accumulate data in
        # dco.unused_data, instead of just storing the arg to the last call.
        source = b'abcdefghijklmnopqrstuvwxyz'
        remainder = b'0123456789'
        y = zlib.compress(source)
        x = y + remainder
        for maxlen in 0, 1000:
            for step in 1, 2, len(y), len(x):
                dco = zlib.decompressobj()
                data = b''
                for i in range(0, len(x), step):
                    if i < len(y):
                        self.assertEqual(dco.unused_data, b'')
                    if maxlen == 0:
                        data += dco.decompress(x[i : i + step])
                        self.assertEqual(dco.unconsumed_tail, b'')
                    else:
                        data += dco.decompress(
                                dco.unconsumed_tail + x[i : i + step], maxlen)
                data += dco.flush()
                self.assertEqual(data, source)
                self.assertEqual(dco.unconsumed_tail, b'')
                self.assertEqual(dco.unused_data, remainder)
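
    # Illustrative addition (not from the original suite): a smaller sketch of
    # the documented unused_data behaviour -- bytes that follow the end of the
    # compressed stream show up in unused_data after decompression.
    def test_unused_data_simple(self):
        x = zlib.compress('foo') + 'trailing garbage'
        dco = zlib.decompressobj()
        self.assertEqual(dco.decompress(x), 'foo')
        self.assertEqual(dco.unused_data, 'trailing garbage')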

    @requires_Decompress_copy
    def test_decompresscopy(self):
        # Test copying a decompression object
        data = HAMLET_SCENE
        comp = zlib.compress(data)

        d0 = zlib.decompressobj()
        bufs0 = []
        bufs0.append(d0.decompress(comp[:32]))

        d1 = d0.copy()
        bufs1 = bufs0[:]

        bufs0.append(d0.decompress(comp[32:]))
        s0 = ''.join(bufs0)

        bufs1.append(d1.decompress(comp[32:]))
        s1 = ''.join(bufs1)

        self.assertEqual(s0, s1)
        self.assertEqual(s0, data)

    @requires_Decompress_copy
    def test_baddecompresscopy(self):
        # Test copying a decompression object in an inconsistent state
        data = zlib.compress(HAMLET_SCENE)
        d = zlib.decompressobj()
        d.decompress(data)
        d.flush()
        self.assertRaises(ValueError, d.copy)

    def test_compresspickle(self):
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(zlib.compressobj(zlib.Z_BEST_COMPRESSION), proto)

    def test_decompresspickle(self):
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(zlib.decompressobj(), proto)

    # Memory use of the following functions takes into account overallocation

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_big_compress_buffer(self, size):
        c = zlib.compressobj(1)
        compress = lambda s: c.compress(s) + c.flush()
        self.check_big_compress_buffer(size, compress)

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
    def test_big_decompress_buffer(self, size):
        d = zlib.decompressobj()
        decompress = lambda s: d.decompress(s) + d.flush()
        self.check_big_decompress_buffer(size, decompress)


def genblock(seed, length, step=1024, generator=random):
    """length-byte stream of random data from a seed (in step-byte blocks)."""
    if seed is not None:
        generator.seed(seed)
    randint = generator.randint
    if length < step or step < 2:
        step = length
    blocks = []
    for i in range(0, length, step):
        blocks.append(''.join([chr(randint(0, 255))
                               for x in range(step)]))
    return ''.join(blocks)[:length]



def choose_lines(source, number, seed=None, generator=random):
    """Return a list of number lines randomly chosen from the source"""
    if seed is not None:
        generator.seed(seed)
    sources = source.split('\n')
    return [generator.choice(sources) for n in range(number)]



HAMLET_SCENE = """
LAERTES

       O, fear me not.
       I stay too long: but here my father comes.

       Enter POLONIUS

       A double blessing is a double grace,
       Occasion smiles upon a second leave.

LORD POLONIUS

       Yet here, Laertes! aboard, aboard, for shame!
       The wind sits in the shoulder of your sail,
       And you are stay'd for. There; my blessing with thee!
       And these few precepts in thy memory
       See thou character. Give thy thoughts no tongue,
       Nor any unproportioned thought his act.
       Be thou familiar, but by no means vulgar.
       Those friends thou hast, and their adoption tried,
       Grapple them to thy soul with hoops of steel;
       But do not dull thy palm with entertainment
       Of each new-hatch'd, unfledged comrade. Beware
       Of entrance to a quarrel, but being in,
       Bear't that the opposed may beware of thee.
       Give every man thy ear, but few thy voice;
       Take each man's censure, but reserve thy judgment.
       Costly thy habit as thy purse can buy,
       But not express'd in fancy; rich, not gaudy;
       For the apparel oft proclaims the man,
       And they in France of the best rank and station
       Are of a most select and generous chief in that.
       Neither a borrower nor a lender be;
       For loan oft loses both itself and friend,
       And borrowing dulls the edge of husbandry.
       This above all: to thine ownself be true,
       And it must follow, as the night the day,
       Thou canst not then be false to any man.
       Farewell: my blessing season this in thee!

LAERTES

       Most humbly do I take my leave, my lord.

LORD POLONIUS

       The time invites you; go; your servants tend.

LAERTES

       Farewell, Ophelia; and remember well
       What I have said to you.

OPHELIA

       'Tis in my memory lock'd,
       And you yourself shall keep the key of it.

LAERTES

       Farewell.
"""


def test_main():
    run_unittest(
        ChecksumTestCase,
        ExceptionTestCase,
        CompressTestCase,
        CompressObjectTestCase
    )

if __name__ == "__main__":
    test_main()