blob: c79d7ea8a38e47b8292d9f9a23bb0744a0efe7c8 [file] [log] [blame]
/*
 * lz4defs.h -- architecture specific defines
 *
 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

/*
 * 64-bit mode detection: LZ4_ARCH64 is 1 when building for a 64-bit
 * kernel, 0 otherwise.  Selects word size for the copy loops below.
 */
#ifdef CONFIG_64BIT
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif

20/*
21 * Architecture-specific macros
22 */
23#define BYTE u8
Chanho Minc72ac7a2013-07-08 16:01:49 -070024typedef struct _U16_S { u16 v; } U16_S;
25typedef struct _U32_S { u32 v; } U32_S;
26typedef struct _U64_S { u64 v; } U64_S;
Rui Salvaterradea5c242016-04-09 22:05:35 +010027#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)
Kyungsik Leecffb78b2013-07-08 16:01:45 -070028
Chanho Minc72ac7a2013-07-08 16:01:49 -070029#define A16(x) (((U16_S *)(x))->v)
Kyungsik Leecffb78b2013-07-08 16:01:45 -070030#define A32(x) (((U32_S *)(x))->v)
31#define A64(x) (((U64_S *)(x))->v)
32
33#define PUT4(s, d) (A32(d) = A32(s))
34#define PUT8(s, d) (A64(d) = A64(s))
Rui Salvaterra3e26a692016-04-09 22:05:34 +010035
36#define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
37 (d = s - A16(p))
38
Chanho Minc72ac7a2013-07-08 16:01:49 -070039#define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
40 do { \
41 A16(p) = v; \
42 p += 2; \
43 } while (0)
Kyungsik Leecffb78b2013-07-08 16:01:45 -070044#else /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
45
Chanho Minc72ac7a2013-07-08 16:01:49 -070046#define A64(x) get_unaligned((u64 *)&(((U16_S *)(x))->v))
47#define A32(x) get_unaligned((u32 *)&(((U16_S *)(x))->v))
48#define A16(x) get_unaligned((u16 *)&(((U16_S *)(x))->v))
49
Kyungsik Leecffb78b2013-07-08 16:01:45 -070050#define PUT4(s, d) \
51 put_unaligned(get_unaligned((const u32 *) s), (u32 *) d)
52#define PUT8(s, d) \
53 put_unaligned(get_unaligned((const u64 *) s), (u64 *) d)
Chanho Minc72ac7a2013-07-08 16:01:49 -070054
Rui Salvaterra3e26a692016-04-09 22:05:34 +010055#define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
56 (d = s - get_unaligned_le16(p))
57
58#define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
59 do { \
60 put_unaligned_le16(v, (u16 *)(p)); \
61 p += 2; \
Chanho Minc72ac7a2013-07-08 16:01:49 -070062 } while (0)
Kyungsik Leecffb78b2013-07-08 16:01:45 -070063#endif
64
/* Number of bytes written per wild-copy packet */
#define COPYLENGTH 8
/* Token byte layout: low ML_BITS bits = match length, rest = literal run */
#define ML_BITS 4
#define ML_MASK ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)
#define RUN_MASK ((1U << RUN_BITS) - 1)
/* log2 of the compressor hash table memory footprint (2^14 bytes) */
#define MEMORY_USAGE 14
/* Minimum match length the LZ4 format can encode */
#define MINMATCH 4
/* Tunes how aggressively the match search skips incompressible data */
#define SKIPSTRENGTH 6
/* The final bytes of a block are always emitted as literals */
#define LASTLITERALS 5
/* No matches are started within MFLIMIT bytes of the input end */
#define MFLIMIT (COPYLENGTH + MINMATCH)
#define MINLENGTH (MFLIMIT + 1)
/* Maximum match distance: offsets are 16-bit, so 2^16 - 1 */
#define MAXD_LOG 16
#define MAXD (1 << MAXD_LOG)
#define MAXD_MASK (u32)(MAXD - 1)
#define MAX_DISTANCE (MAXD - 1)
#define HASH_LOG (MAXD_LOG - 1)
#define HASHTABLESIZE (1 << HASH_LOG)
#define MAX_NB_ATTEMPTS 256
#define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH)
/* Inputs below this size can use the 64K (16-bit offset) fast path */
#define LZ4_64KLIMIT ((1<<16) + (MFLIMIT - 1))
#define HASHLOG64K ((MEMORY_USAGE - 2) + 1)
#define HASH64KTABLESIZE (1U << HASHLOG64K)
/*
 * Multiplicative hash of the 4 bytes at p; 2654435761 is the 32-bit
 * golden-ratio (Knuth) constant.  Variants differ only in table size.
 */
#define LZ4_HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - (MEMORY_USAGE-2)))
#define LZ4_HASH64K_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASHLOG64K))
#define HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASH_LOG))

#if LZ4_ARCH64/* 64-bit */
/* Copy granularity of the inner loops: one native machine word */
#define STEPSIZE 8

/* Copy one 8-byte word from s to d and advance both pointers. */
#define LZ4_COPYSTEP(s, d) \
	do { \
		PUT8(s, d); \
		d += 8; \
		s += 8; \
	} while (0)

/* On 64-bit, one COPYLENGTH packet is a single 8-byte step. */
#define LZ4_COPYPACKET(s, d)	LZ4_COPYSTEP(s, d)

/*
 * Like LZ4_WILDCOPY, but tolerates d >= e on entry (copies nothing
 * then), whereas LZ4_WILDCOPY always writes at least one packet.
 */
#define LZ4_SECURECOPY(s, d, e) \
	do { \
		if (d < e) { \
			LZ4_WILDCOPY(s, d, e); \
		} \
	} while (0)
/* Hash table entry type: u32 (appears to hold offsets, since a pointer
 * would not fit in 32 bits here — NOTE(review): confirm against users). */
#define HTYPE u32

/*
 * Number of leading matching bytes between two XORed 64-bit words:
 * count the zero bits at the "first byte" end (clz on BE, ctz on LE)
 * and convert bits to bytes.
 */
#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
#endif

#else /* 32-bit */
/* Copy granularity of the inner loops: one native machine word */
#define STEPSIZE 4

/* Copy one 4-byte word from s to d and advance both pointers. */
#define LZ4_COPYSTEP(s, d) \
	do { \
		PUT4(s, d); \
		d += 4; \
		s += 4; \
	} while (0)

/* On 32-bit, one COPYLENGTH (8-byte) packet takes two 4-byte steps. */
#define LZ4_COPYPACKET(s, d) \
	do { \
		LZ4_COPYSTEP(s, d); \
		LZ4_COPYSTEP(s, d); \
	} while (0)

/* 32-bit has no separate guarded variant; secure copy is the wild copy. */
#define LZ4_SECURECOPY	LZ4_WILDCOPY
/* Hash table entry type: raw byte pointer on 32-bit hosts */
#define HTYPE const u8*

/* 32-bit variant of the common-byte count (see 64-bit comment above). */
#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
#endif

#endif

/*
 * Copy from src to dst in COPYLENGTH-sized packets until dst reaches
 * end.  This is a "wild" copy: it always writes at least one packet
 * (do/while) and may overrun end by up to one packet, so callers must
 * guarantee slack space past end.  Both pointers are advanced.
 */
#define LZ4_WILDCOPY(src, dst, end) \
	do { \
		LZ4_COPYPACKET(src, dst); \
	} while (dst < end)

/*
 * Copy l bytes from src to dst, rounding the write up to packet
 * granularity, and leave dst pointing exactly at dst + l.
 */
#define LZ4_BLINDCOPY(src, dst, l) \
	do { \
		u8 *stop = (dst) + l; \
		LZ4_WILDCOPY(src, dst, stop); \
		dst = stop; \
	} while (0)
157 } while (0)