Linux 4.6-rc6
[cris-mirror.git] / lib / lz4 / lz4defs.h
blob c79d7ea8a38e47b8292d9f9a23bb0744a0efe7c8
1 /*
2 * lz4defs.h -- architecture specific defines
4 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
9 */
/*
 * Detects 64 bits mode
 *
 * LZ4_ARCH64 selects the 8-byte (vs 4-byte) copy-step variants of the
 * macros defined later in this header.
 */
#if defined(CONFIG_64BIT)
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif
/*
 * Architecture-specific macros
 */
#define BYTE u8

/*
 * Wrapper structs used to express 16/32/64-bit loads and stores of
 * possibly unaligned data; accessed only through the A16/A32/A64
 * macros defined below.
 */
typedef struct _U16_S { u16 v; } U16_S;
typedef struct _U32_S { u32 v; } U32_S;
typedef struct _U64_S { u64 v; } U64_S;
#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)

/*
 * The CPU handles unaligned accesses natively: load/store words
 * directly through the wrapper structs.
 *
 * NOTE(review): this fast path accesses the 16-bit offset fields in
 * native byte order, which matches the little-endian LZ4 stream format
 * only on little-endian CPUs — presumably no big-endian arch reaches
 * this branch; TODO confirm.
 */
#define A16(x) (((U16_S *)(x))->v)
#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)

/* Copy one 4-/8-byte word from s to d (pointers are not advanced). */
#define PUT4(s, d) (A32(d) = A32(s))
#define PUT8(s, d) (A64(d) = A64(s))

/* d = s - 16-bit value at p (match-offset decode; native order here). */
#define LZ4_READ_LITTLEENDIAN_16(d, s, p)	\
	(d = s - A16(p))

/* Store v as a 16-bit value at p and advance p past it. */
#define LZ4_WRITE_LITTLEENDIAN_16(p, v)	\
	do {				\
		A16(p) = v;		\
		p += 2;			\
	} while (0)
#else /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */

/*
 * No efficient unaligned access: go through get_unaligned() /
 * put_unaligned().
 *
 * Fix: cast through the wrapper struct matching the access width.  The
 * original cast all three widths through U16_S; the resulting address
 * is identical (address of the first member), so behavior is
 * unchanged, but the mismatched struct type was misleading.
 */
#define A64(x) get_unaligned((u64 *)&(((U64_S *)(x))->v))
#define A32(x) get_unaligned((u32 *)&(((U32_S *)(x))->v))
#define A16(x) get_unaligned((u16 *)&(((U16_S *)(x))->v))

/* Copy one 4-/8-byte word from s to d (pointers are not advanced). */
#define PUT4(s, d) \
	put_unaligned(get_unaligned((const u32 *) s), (u32 *) d)
#define PUT8(s, d) \
	put_unaligned(get_unaligned((const u64 *) s), (u64 *) d)

/* d = s - little-endian 16-bit value at p (match-offset decode). */
#define LZ4_READ_LITTLEENDIAN_16(d, s, p)	\
	(d = s - get_unaligned_le16(p))

/* Store v little-endian at p and advance p past it. */
#define LZ4_WRITE_LITTLEENDIAN_16(p, v)			\
	do {						\
		put_unaligned_le16(v, (u16 *)(p));	\
		p += 2;					\
	} while (0)
#endif
/*
 * LZ4 format and compressor tuning constants.
 *
 * The one-byte sequence token is split into an ML_BITS-wide
 * match-length field and a RUN_BITS-wide literal-run field
 * (ML_BITS + RUN_BITS == 8).
 */
#define COPYLENGTH 8		/* granularity of the wild-copy loops */
#define ML_BITS 4		/* match-length bits in the token */
#define ML_MASK ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)	/* literal-run bits in the token */
#define RUN_MASK ((1U << RUN_BITS) - 1)
#define MEMORY_USAGE 14		/* log2 sizing of the compressor hash table */
#define MINMATCH 4		/* minimum match length, in bytes */
#define SKIPSTRENGTH 6		/* literal-skip acceleration shift */
#define LASTLITERALS 5		/* trailing input bytes kept as literals */
#define MFLIMIT (COPYLENGTH + MINMATCH)	/* no match starts closer than this to end */
#define MINLENGTH (MFLIMIT + 1)
#define MAXD_LOG 16
#define MAXD (1 << MAXD_LOG)
#define MAXD_MASK (u32)(MAXD - 1)
#define MAX_DISTANCE (MAXD - 1)	/* max match offset: 64KB - 1 */
#define HASH_LOG (MAXD_LOG - 1)
#define HASHTABLESIZE (1 << HASH_LOG)
#define MAX_NB_ATTEMPTS 256	/* match-search attempt bound (HC variant) */
#define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH)
#define LZ4_64KLIMIT ((1<<16) + (MFLIMIT - 1))
#define HASHLOG64K ((MEMORY_USAGE - 2) + 1)	/* one extra bit for <64K inputs */
#define HASH64KTABLESIZE (1U << HASHLOG64K)
/*
 * Multiplicative hashes of the MINMATCH (4) bytes at p; 2654435761 is
 * Knuth's golden-ratio constant (2^32 / phi).  The shift keeps the top
 * log2(table size) bits.
 */
#define LZ4_HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - (MEMORY_USAGE-2)))
#define LZ4_HASH64K_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASHLOG64K))
#define HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASH_LOG))
#if LZ4_ARCH64/* 64-bit */
#define STEPSIZE 8	/* copy granularity, in bytes */

/* Copy one 8-byte word from s to d and advance both pointers. */
#define LZ4_COPYSTEP(s, d)	\
	do {			\
		PUT8(s, d);	\
		d += 8;		\
		s += 8;		\
	} while (0)

/* On 64-bit, one packet is a single 8-byte step. */
#define LZ4_COPYPACKET(s, d)	LZ4_COPYSTEP(s, d)

/*
 * Wild-copy only if d has not already reached e.
 * Fix: restored the brace closing the if-block, which was missing and
 * left the do/while expansion unbalanced (would not compile when the
 * macro is used).
 */
#define LZ4_SECURECOPY(s, d, e)			\
	do {					\
		if (d < e) {			\
			LZ4_WILDCOPY(s, d, e);	\
		}				\
	} while (0)
#define HTYPE u32	/* hash-table entry type on 64-bit */

#ifdef __BIG_ENDIAN
/* Byte count of the zero prefix/suffix of val (common-length helper):
 * clz/ctz gives the zero-bit count, >> 3 converts bits to bytes. */
#define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
#endif

#else /* 32-bit */
#define STEPSIZE 4	/* copy granularity, in bytes */

/* Copy one 4-byte word from s to d and advance both pointers. */
#define LZ4_COPYSTEP(s, d)	\
	do {			\
		PUT4(s, d);	\
		d += 4;		\
		s += 4;		\
	} while (0)

/* On 32-bit, one packet is two 4-byte steps (8 bytes total). */
#define LZ4_COPYPACKET(s, d)		\
	do {				\
		LZ4_COPYSTEP(s, d);	\
		LZ4_COPYSTEP(s, d);	\
	} while (0)

/* On 32-bit a plain wild copy is used; no extra bounds guard. */
#define LZ4_SECURECOPY	LZ4_WILDCOPY
#define HTYPE const u8*	/* hash-table entry type on 32-bit */

#ifdef __BIG_ENDIAN
/* Byte count of the zero prefix/suffix of val (common-length helper). */
#define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
#endif

#endif
/*
 * Copy from s to d in COPYLENGTH-byte packets until d reaches e.
 * The condition is checked only after each full packet, so the copy
 * may overrun e by up to one packet; callers must guarantee that much
 * slack in the output buffer.
 */
#define LZ4_WILDCOPY(s, d, e)		\
	do {				\
		LZ4_COPYPACKET(s, d);	\
	} while (d < e)

/*
 * Copy l bytes from s to d (physically in whole packets, i.e. with the
 * same possible overrun as LZ4_WILDCOPY) and leave d pointing just
 * past the l copied bytes.
 */
#define LZ4_BLINDCOPY(s, d, l)		\
	do {				\
		u8 *e = (d) + l;	\
		LZ4_WILDCOPY(s, d, e);	\
		d = e;			\
	} while (0)