/*
 * Copyright (c) 2016-present, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef ZSTD_CCOMMON_H_MODULE
#define ZSTD_CCOMMON_H_MODULE

/* this module contains definitions which must be identical
 * across compression, decompression and dictBuilder.
 * It also contains a few functions useful to at least 2 of them
 * and which benefit from being inlined */

/*-*************************************
* Dependencies
***************************************/
#include "compiler.h"
#include "mem.h"
#include "debug.h"          /* assert, DEBUGLOG, RAWLOG, g_debuglevel */
#include "error_private.h"
#define ZSTD_STATIC_LINKING_ONLY
#include "zstd.h"
#define FSE_STATIC_LINKING_ONLY
#include "fse.h"
#define HUF_STATIC_LINKING_ONLY
#include "huf.h"
#ifndef XXH_STATIC_LINKING_ONLY
#  define XXH_STATIC_LINKING_ONLY   /* XXH64_state_t */
#endif
#include "xxhash.h"         /* XXH_reset, update, digest */

#if defined (__cplusplus)
extern "C" {
#endif

/* ---- static assert (debug) ---- */
#define ZSTD_STATIC_ASSERT(c) DEBUG_STATIC_ASSERT(c)
#define ZSTD_isError ERR_isError   /* for inlining */
#define FSE_isError  ERR_isError
#define HUF_isError  ERR_isError


/*-*************************************
* shared macros
***************************************/
#undef MIN
#undef MAX
#define MIN(a,b) ((a)<(b) ? (a) : (b))
#define MAX(a,b) ((a)>(b) ? (a) : (b))

/**
 * Return the specified error if the condition evaluates to true.
 *
 * In debug modes, prints additional information.
 * In order to do that (particularly, printing the conditional that failed),
 * this can't just wrap RETURN_ERROR().
 */
#define RETURN_ERROR_IF(cond, err, ...) \
  if (cond) { \
    RAWLOG(3, "%s:%d: ERROR!: check %s failed, returning %s", __FILE__, __LINE__, ZSTD_QUOTE(cond), ZSTD_QUOTE(ERROR(err))); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  }
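
/* Illustrative usage sketch (not part of the original header) : a typical call
 * site guards a buffer-capacity check with RETURN_ERROR_IF(). The helper name
 * below is hypothetical; `dstSize_tooSmall` is one of the regular zstd error
 * codes, and ZSTD_blockHeaderSize is defined later in this file.
 *
 *     static size_t ZSTD_example_writeBlockHeader(void* dst, size_t dstCapacity)
 *     {
 *         RETURN_ERROR_IF(dstCapacity < ZSTD_blockHeaderSize, dstSize_tooSmall,
 *                         "dstCapacity=%u is too small", (unsigned)dstCapacity);
 *         return ZSTD_blockHeaderSize;   // number of bytes that would be written
 *     }
 */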

/**
 * Unconditionally return the specified error.
 *
 * In debug modes, prints additional information.
 */
#define RETURN_ERROR(err, ...) \
  do { \
    RAWLOG(3, "%s:%d: ERROR!: unconditional check failed, returning %s", __FILE__, __LINE__, ZSTD_QUOTE(ERROR(err))); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  } while(0)

/**
 * If the provided expression evaluates to an error code, returns that error code.
 *
 * In debug modes, prints additional information.
 */
#define FORWARD_IF_ERROR(err, ...) \
  do { \
    size_t const err_code = (err); \
    if (ERR_isError(err_code)) { \
      RAWLOG(3, "%s:%d: ERROR!: forwarding error in %s: %s", __FILE__, __LINE__, ZSTD_QUOTE(err), ERR_getErrorName(err_code)); \
      RAWLOG(3, ": " __VA_ARGS__); \
      RAWLOG(3, "\n"); \
      return err_code; \
    } \
  } while(0)
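
/* Illustrative usage sketch (not part of the original header) : FORWARD_IF_ERROR()
 * propagates an error code returned by a nested call, so intermediate layers do
 * not need explicit `if (ZSTD_isError(...)) return ...;` boilerplate. The two
 * helper functions named below are hypothetical.
 *
 *     static size_t ZSTD_example_compressTwoSteps(void* dst, size_t dstCapacity,
 *                                                 const void* src, size_t srcSize)
 *     {
 *         FORWARD_IF_ERROR(ZSTD_example_step1(dst, dstCapacity, src, srcSize),
 *                          "step1 failed");
 *         return ZSTD_example_step2(dst, dstCapacity);
 *     }
 */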


/*-*************************************
* Common constants
***************************************/
#define ZSTD_OPT_NUM    (1<<12)

#define ZSTD_REP_NUM      3                 /* number of repcodes */
#define ZSTD_REP_MOVE     (ZSTD_REP_NUM-1)
static const U32 repStartValue[ZSTD_REP_NUM] = { 1, 4, 8 };

#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)
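/* Illustrative note (not in the original header) : KB, MB and GB are postfix
 * multipliers, meant to be written after a number, e.g. `512 KB` expands to
 * `512 *(1 <<10)` == 524288. */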

#define BIT7 128
#define BIT6  64
#define BIT5  32
#define BIT4  16
#define BIT1   2
#define BIT0   1

#define ZSTD_WINDOWLOG_ABSOLUTEMIN 10
static const size_t ZSTD_fcs_fieldSize[4] = { 0, 2, 4, 8 };
static const size_t ZSTD_did_fieldSize[4] = { 0, 1, 2, 4 };

#define ZSTD_FRAMEIDSIZE 4   /* magic number size */

#define ZSTD_BLOCKHEADERSIZE 3   /* the C standard doesn't allow a `static const` variable to be initialized with another `static const` variable */
static const size_t ZSTD_blockHeaderSize = ZSTD_BLOCKHEADERSIZE;
typedef enum { bt_raw, bt_rle, bt_compressed, bt_reserved } blockType_e;

#define MIN_SEQUENCES_SIZE 1 /* nbSeq==0 */
#define MIN_CBLOCK_SIZE (1 /*litCSize*/ + 1 /* RLE or RAW */ + MIN_SEQUENCES_SIZE /* nbSeq==0 */)   /* for a non-null block */

#define HufLog 12
typedef enum { set_basic, set_rle, set_compressed, set_repeat } symbolEncodingType_e;

#define LONGNBSEQ 0x7F00

#define MINMATCH 3

#define Litbits  8
#define MaxLit ((1<<Litbits) - 1)
#define MaxML   52
#define MaxLL   35
#define DefaultMaxOff 28
#define MaxOff  31
#define MaxSeq MAX(MaxLL, MaxML)   /* Assumption : MaxOff < MaxLL,MaxML */
#define MLFSELog    9
#define LLFSELog    9
#define OffFSELog   8
#define MaxFSELog  MAX(MAX(MLFSELog, LLFSELog), OffFSELog)

static const U32 LL_bits[MaxLL+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 6, 7, 8, 9,10,11,12,
                                     13,14,15,16 };
static const S16 LL_defaultNorm[MaxLL+1] = { 4, 3, 2, 2, 2, 2, 2, 2,
                                             2, 2, 2, 2, 2, 1, 1, 1,
                                             2, 2, 2, 2, 2, 2, 2, 2,
                                             2, 3, 2, 1, 1, 1, 1, 1,
                                            -1,-1,-1,-1 };
#define LL_DEFAULTNORMLOG 6  /* for static allocation */
static const U32 LL_defaultNormLog = LL_DEFAULTNORMLOG;

static const U32 ML_bits[MaxML+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 4, 5, 7, 8, 9,10,11,
                                     12,13,14,15,16 };
static const S16 ML_defaultNorm[MaxML+1] = { 1, 4, 3, 2, 2, 2, 2, 2,
                                             2, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1,-1,-1,
                                            -1,-1,-1,-1,-1 };
#define ML_DEFAULTNORMLOG 6  /* for static allocation */
static const U32 ML_defaultNormLog = ML_DEFAULTNORMLOG;

static const S16 OF_defaultNorm[DefaultMaxOff+1] = { 1, 1, 1, 1, 1, 1, 2, 2,
                                                     2, 1, 1, 1, 1, 1, 1, 1,
                                                     1, 1, 1, 1, 1, 1, 1, 1,
                                                    -1,-1,-1,-1,-1 };
#define OF_DEFAULTNORMLOG 5  /* for static allocation */
static const U32 OF_defaultNormLog = OF_DEFAULTNORMLOG;


/*-*******************************************
* Shared functions to include for inlining
*********************************************/
static void ZSTD_copy8(void* dst, const void* src) { memcpy(dst, src, 8); }

#define COPY8(d,s) { ZSTD_copy8(d,s); d+=8; s+=8; }
static void ZSTD_copy16(void* dst, const void* src) { memcpy(dst, src, 16); }
#define COPY16(d,s) { ZSTD_copy16(d,s); d+=16; s+=16; }

#define WILDCOPY_OVERLENGTH 32
#define WILDCOPY_VECLEN 16

typedef enum {
    ZSTD_no_overlap,
    ZSTD_overlap_src_before_dst,
    /*  ZSTD_overlap_dst_before_src, */
} ZSTD_overlap_e;

/*! ZSTD_wildcopy() :
 *  Custom version of memcpy(), can over-read/over-write up to WILDCOPY_OVERLENGTH bytes beyond `length` (even when length==0).
 *  @param ovtype controls the overlap detection
 *         - ZSTD_no_overlap: The source and destination are guaranteed to be at least WILDCOPY_VECLEN bytes apart.
 *         - ZSTD_overlap_src_before_dst: The src and dst may overlap, but they MUST be at least 8 bytes apart.
 *           The src buffer must be before the dst buffer.
 */
MEM_STATIC FORCE_INLINE_ATTR DONT_VECTORIZE
void ZSTD_wildcopy(void* dst, const void* src, ptrdiff_t length, ZSTD_overlap_e const ovtype)
{
    ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src;
    const BYTE* ip = (const BYTE*)src;
    BYTE* op = (BYTE*)dst;
    BYTE* const oend = op + length;

    assert(diff >= 8 || (ovtype == ZSTD_no_overlap && diff <= -WILDCOPY_VECLEN));

    if (ovtype == ZSTD_overlap_src_before_dst && diff < WILDCOPY_VECLEN) {
        /* Handle short offset copies. */
        do {
            COPY8(op, ip);
        } while (op < oend);
    } else {
        assert(diff >= WILDCOPY_VECLEN || diff <= -WILDCOPY_VECLEN);
        /* Separate out the first two COPY16() calls because the copy length is
         * almost certain to be short, so the branches have different
         * probabilities.
         * On gcc-9 unrolling once is +1.6%, twice is +2%, thrice is +1.8%.
         * On clang-8 unrolling once is +1.4%, twice is +3.3%, thrice is +3%.
         */
        COPY16(op, ip);
        COPY16(op, ip);
        if (op >= oend) return;
        do {
            COPY16(op, ip);
            COPY16(op, ip);
        }
        while (op < oend);
    }
}
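
/* Illustrative usage sketch (not part of the original header) : since
 * ZSTD_wildcopy() may write up to WILDCOPY_OVERLENGTH bytes past `length`, a
 * caller must keep that much slack at the end of the destination buffer, and
 * fall back to an exact copy otherwise. Buffer/cursor names are hypothetical.
 *
 *     if (op + litLength + WILDCOPY_OVERLENGTH <= oend) {
 *         ZSTD_wildcopy(op, ip, (ptrdiff_t)litLength, ZSTD_no_overlap);  // fast path
 *     } else {
 *         memcpy(op, ip, litLength);   // near the end of dst : copy exactly
 *     }
 *     op += litLength;
 *     ip += litLength;
 */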

/*! ZSTD_wildcopy8() :
 *  The same as ZSTD_wildcopy(), but it may over-write only up to 8 bytes beyond `length`,
 *  and works for overlapping buffers that are at least 8 bytes apart.
 */
MEM_STATIC void ZSTD_wildcopy8(void* dst, const void* src, ptrdiff_t length)
{
    const BYTE* ip = (const BYTE*)src;
    BYTE* op = (BYTE*)dst;
    BYTE* const oend = (BYTE*)op + length;
    do {
        COPY8(op, ip);
    } while (op < oend);
}


/*-*******************************************
* Private declarations
*********************************************/
typedef struct seqDef_s {
    U32 offset;
    U16 litLength;
    U16 matchLength;
} seqDef;

typedef struct {
    seqDef* sequencesStart;
    seqDef* sequences;
    BYTE* litStart;
    BYTE* lit;
    BYTE* llCode;
    BYTE* mlCode;
    BYTE* ofCode;
    size_t maxNbSeq;
    size_t maxNbLit;
    U32   longLengthID;   /* 0 == no longLength; 1 == Lit.longLength; 2 == Match.longLength; */
    U32   longLengthPos;
} seqStore_t;

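/* Illustrative sketch (not part of the original header) : a simplified view of
 * how the compressor appends one (literals, match) pair into a seqStore_t. The
 * real ZSTD_storeSeq() in the compressor additionally applies encoding
 * conventions (repcode offsets, MINMATCH bias, long-length handling) that are
 * omitted here; the function below is hypothetical.
 *
 *     static void example_appendSequence(seqStore_t* ss,
 *                                        const BYTE* literals, size_t litLength,
 *                                        U32 offset, size_t matchLength)
 *     {
 *         memcpy(ss->lit, literals, litLength);     // literals are buffered contiguously
 *         ss->lit += litLength;
 *         ss->sequences[0].litLength   = (U16)litLength;
 *         ss->sequences[0].offset      = offset;
 *         ss->sequences[0].matchLength = (U16)matchLength;
 *         ss->sequences++;                          // one seqDef per sequence
 *     }
 */
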
/**
 * Contains the compressed frame size and an upper-bound for the decompressed frame size.
 * Note: before using `compressedSize`, check for errors using ZSTD_isError().
 *       Similarly, before using `decompressedBound`, check that it is not
 *       equal to ZSTD_CONTENTSIZE_ERROR.
 */
typedef struct {
    size_t compressedSize;
    unsigned long long decompressedBound;
} ZSTD_frameSizeInfo;   /* decompress & legacy */
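
/* Illustrative sketch (not part of the original header) : validating a
 * ZSTD_frameSizeInfo before trusting its fields. `fsi` is assumed to come from
 * an internal helper such as the frame-size scan in the decompressor.
 *
 *     if (ZSTD_isError(fsi.compressedSize)
 *      || (fsi.decompressedBound == ZSTD_CONTENTSIZE_ERROR))
 *         return ERROR(corruption_detected);
 */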

const seqStore_t* ZSTD_getSeqStore(const ZSTD_CCtx* ctx);   /* compress & dictBuilder */
void ZSTD_seqToCodes(const seqStore_t* seqStorePtr);   /* compress, dictBuilder, decodeCorpus (shouldn't get its definition from here) */

/* custom memory allocation functions */
void* ZSTD_malloc(size_t size, ZSTD_customMem customMem);
void* ZSTD_calloc(size_t size, ZSTD_customMem customMem);
void  ZSTD_free(void* ptr, ZSTD_customMem customMem);
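
/* Illustrative usage sketch (not part of the original header) : ZSTD_malloc() and
 * ZSTD_free() route allocations through a user-supplied ZSTD_customMem when its
 * function pointers are non-NULL, and fall back to the C standard allocator
 * otherwise (behavior assumed from the common implementation). The tracking
 * callbacks below are hypothetical.
 *
 *     static void* exampleAlloc(void* opaque, size_t size) { (void)opaque; return malloc(size); }
 *     static void  exampleFree (void* opaque, void* ptr)   { (void)opaque; free(ptr); }
 *
 *     ZSTD_customMem const cmem = { exampleAlloc, exampleFree, NULL };
 *     void* const workspace = ZSTD_malloc(1 MB, cmem);
 *     ZSTD_free(workspace, cmem);
 */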


MEM_STATIC U32 ZSTD_highbit32(U32 val)   /* compress, dictBuilder, decodeCorpus */
{
    assert(val != 0);
    {
#   if defined(_MSC_VER)   /* Visual */
        unsigned long r=0;
        _BitScanReverse(&r, val);
        return (unsigned)r;
#   elif defined(__GNUC__) && (__GNUC__ >= 3)   /* GCC Intrinsic */
        return __builtin_clz (val) ^ 31;
#   elif defined(__ICCARM__)    /* IAR Intrinsic */
        return 31 - __CLZ(val);
#   else   /* Software version */
        static const U32 DeBruijnClz[32] = { 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31 };
        U32 v = val;
        v |= v >> 1;
        v |= v >> 2;
        v |= v >> 4;
        v |= v >> 8;
        v |= v >> 16;
        return DeBruijnClz[(v * 0x07C4ACDDU) >> 27];
#   endif
    }
}
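
/* Illustrative examples (not in the original header) : ZSTD_highbit32() returns
 * the position of the highest set bit, i.e. floor(log2(val)); val must not be 0
 * (enforced by the assert above) :
 *     ZSTD_highbit32(1)           == 0
 *     ZSTD_highbit32(32768)       == 15
 *     ZSTD_highbit32(0xFFFFFFFFU) == 31
 */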


/* ZSTD_invalidateRepCodes() :
 * ensures next compression will not use repcodes from previous block.
 * Note : only works with regular variant;
 *        do not use with extDict variant ! */
void ZSTD_invalidateRepCodes(ZSTD_CCtx* cctx);   /* zstdmt, adaptive_compression (shouldn't get this definition from here) */


typedef struct {
    blockType_e blockType;
    U32 lastBlock;
    U32 origSize;
} blockProperties_t;   /* declared here for decompress and fullbench */

/*! ZSTD_getcBlockSize() :
 *  Provides the size of compressed block from block header `src` */
/*  Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_getcBlockSize(const void* src, size_t srcSize,
                          blockProperties_t* bpPtr);
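
/* Illustrative usage sketch (not part of the original header) : walking the blocks
 * of a frame with ZSTD_getcBlockSize(). `ip` and `remaining` are hypothetical
 * cursors over the compressed input.
 *
 *     blockProperties_t bp;
 *     size_t const cBlockSize = ZSTD_getcBlockSize(ip, remaining, &bp);
 *     FORWARD_IF_ERROR(cBlockSize, "invalid block header");
 *     ip += ZSTD_blockHeaderSize;   // skip the 3-byte block header
 *     // bp.blockType, bp.lastBlock and cBlockSize now describe the block payload at ip
 */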

/*! ZSTD_decodeSeqHeaders() :
 *  decode sequence header from src */
/*  Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_decodeSeqHeaders(ZSTD_DCtx* dctx, int* nbSeqPtr,
                             const void* src, size_t srcSize);

359
Nick Terrellde6c6bc2017-08-24 18:09:50 -0700360#if defined (__cplusplus)
361}
362#endif
Yann Colletf04deff2017-07-06 01:42:46 -0700363
Yann Collet2acb5d32015-10-29 16:49:43 +0100364#endif /* ZSTD_CCOMMON_H_MODULE */