// Copyright 2011 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// Author: Jyrki Alakuijala (jyrki@google.com)
//
// Entropy encoding (Huffman) for webp lossless.
#include <assert.h>
#include <stdlib.h>
#include <string.h>

#include "./huffman_encode.h"
#include "../utils/utils.h"
#include "../webp/format_constants.h"
// -----------------------------------------------------------------------------
// Util function to optimize the symbol map for RLE coding

// Heuristics for selecting the stride ranges to collapse.
static int ValuesShouldBeCollapsedToStrideAverage(int a, int b) {
  return abs(a - b) < 4;
}
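// For illustration: counts 10 and 12 are close enough to collapse
// (|10 - 12| < 4), while 10 and 14 are not.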
// Change the population counts in a way that the consequent
// Huffman tree compression, especially its RLE-part, gives smaller output.
static int OptimizeHuffmanForRle(int length, int* const counts) {
  uint8_t* good_for_rle;
  int i;
  // 1) Let's make the Huffman code more compatible with rle encoding.
  for (; length >= 0; --length) {
    if (length == 0) {
      return 1;  // All zeros.
    }
    if (counts[length - 1] != 0) {
      // Now counts[0..length - 1] does not have trailing zeros.
      break;
    }
  }
  // 2) Let's mark all population counts that already can be encoded
  // with an rle code.
  good_for_rle = (uint8_t*)calloc(length, 1);
  if (good_for_rle == NULL) {
    return 0;
  }
  {
    // Let's not spoil any of the existing good rle codes.
    // Mark any seq of 0's that is longer than 5 as a good_for_rle.
    // Mark any seq of non-0's that is longer than 7 as a good_for_rle.
    int symbol = counts[0];
    int stride = 0;
    for (i = 0; i < length + 1; ++i) {
      if (i == length || counts[i] != symbol) {
        if ((symbol == 0 && stride >= 5) ||
            (symbol != 0 && stride >= 7)) {
          int k;
          for (k = 0; k < stride; ++k) {
            good_for_rle[i - k - 1] = 1;
          }
        }
        stride = 1;
        if (i != length) symbol = counts[i];
      } else {
        ++stride;
      }
    }
  }
  // 3) Let's replace those population counts that lead to more rle codes.
  {
    int stride = 0;
    int limit = counts[0];
    int sum = 0;
    for (i = 0; i < length + 1; ++i) {
      if (i == length || good_for_rle[i] ||
          (i != 0 && good_for_rle[i - 1]) ||
          !ValuesShouldBeCollapsedToStrideAverage(counts[i], limit)) {
        if (stride >= 4 || (stride >= 3 && sum == 0)) {
          int k;
          // The stride must end, collapse what we have, if we have enough (4).
          int count = (sum + stride / 2) / stride;
          if (count < 1) count = 1;
          // Don't make an all zeros stride to be upgraded to ones.
          if (sum == 0) count = 0;
          for (k = 0; k < stride; ++k) {
            // We don't want to change value at counts[i],
            // that is already belonging to the next stride. Thus - 1.
            counts[i - k - 1] = count;
          }
        }
        stride = 0;
        sum = 0;
        if (i < length - 3) {
          // All interesting strides have a count of at least 4,
          // at least when non-zeros.
          limit = (counts[i] + counts[i + 1] +
                   counts[i + 2] + counts[i + 3] + 2) / 4;
        } else if (i < length) {
          limit = counts[i];
        } else {
          limit = 0;
        }
      }
      ++stride;
      if (i != length) {
        sum += counts[i];
        if (stride >= 4) limit = (sum + stride / 2) / stride;
      }
    }
  }
  free(good_for_rle);
  return 1;
}
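// Worked example (values chosen for illustration): a stride of four
// near-equal counts {8, 9, 11, 12} that is not protected by good_for_rle is
// collapsed in step 3 to its rounded average (8 + 9 + 11 + 12 + 2) / 4 = 10,
// giving {10, 10, 10, 10}. Equal counts tend to get equal code lengths, so
// the later RLE of the code-length stream (codes 16..18 below) sees longer
// runs.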
// A node of the Huffman tree being built. Leaves store the symbol in value_
// and have pool_index_left_ == -1; internal nodes reference their children
// by index into the node pool.
typedef struct {
  int total_count_;
  int value_;
  int pool_index_left_;
  int pool_index_right_;
} HuffmanTree;
// A comparer function for two Huffman trees: sorts first by 'total count'
// (more comes first), and then by 'value' (smaller comes first).
static int CompareHuffmanTrees(const void* ptr1, const void* ptr2) {
  const HuffmanTree* const t1 = (const HuffmanTree*)ptr1;
  const HuffmanTree* const t2 = (const HuffmanTree*)ptr2;
  if (t1->total_count_ > t2->total_count_) {
    return -1;
  } else if (t1->total_count_ < t2->total_count_) {
    return 1;
  } else {
    assert(t1->value_ != t2->value_);
    return (t1->value_ < t2->value_) ? -1 : 1;
  }
}
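// Note: with this ordering, qsort() in GenerateOptimalTree() places the most
// frequent symbols first, so tree[tree_size - 1] and tree[tree_size - 2] are
// always the two least frequent nodes to merge next.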
static void SetBitDepths(const HuffmanTree* const tree,
                         const HuffmanTree* const pool,
                         uint8_t* const bit_depths, int level) {
  if (tree->pool_index_left_ >= 0) {
    SetBitDepths(&pool[tree->pool_index_left_], pool, bit_depths, level + 1);
    SetBitDepths(&pool[tree->pool_index_right_], pool, bit_depths, level + 1);
  } else {
    bit_depths[tree->value_] = level;
  }
}
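// For illustration: the code length of a symbol is its depth in the tree.
// If three leaves are combined as ((A, B), C), the recursion yields
// bit_depths[A] = bit_depths[B] = 2 and bit_depths[C] = 1.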
// Create an optimal Huffman tree.
//
// (histogram, histogram_size): population counts.
// tree_depth_limit: maximum bit depth (inclusive) of the codes.
// bit_depths[]: how many bits are used for each symbol.
//
// Returns 0 when an error has occurred.
//
// The catch here is that the tree cannot be arbitrarily deep.
// count_min is the value that is faked as the minimum population count,
// and this minimum is raised until the tree satisfies the maximum-depth
// requirement.
//
// This algorithm is not of excellent performance for very long data blocks,
// especially when the number of symbols approaches 2**tree_depth_limit, but
// we are not planning to use this with extremely long blocks.
//
// See http://en.wikipedia.org/wiki/Huffman_coding
static int GenerateOptimalTree(const int* const histogram, int histogram_size,
                               int tree_depth_limit,
                               uint8_t* const bit_depths) {
  int count_min;
  HuffmanTree* tree_pool;
  HuffmanTree* tree;
  int tree_size_orig = 0;
  int i;

  for (i = 0; i < histogram_size; ++i) {
    if (histogram[i] != 0) {
      ++tree_size_orig;
    }
  }

  if (tree_size_orig == 0) {   // pretty optimal already!
    return 1;
  }

  // 3 * tree_size is enough to cover all the nodes representing a
  // population and all the inserted nodes combining two existing nodes.
  // The tree pool needs 2 * (tree_size_orig - 1) entities, and the
  // tree needs exactly tree_size_orig entities.
  tree = (HuffmanTree*)WebPSafeMalloc(3ULL * tree_size_orig, sizeof(*tree));
  if (tree == NULL) return 0;
  tree_pool = tree + tree_size_orig;

  // For block sizes with less than 64k symbols we never need to do a
  // second iteration of this loop.
  // If we actually start running inside this loop a lot, we would perhaps
  // be better off with the Katajainen algorithm.
  assert(tree_size_orig <= (1 << (tree_depth_limit - 1)));
  for (count_min = 1; ; count_min *= 2) {
    int tree_size = tree_size_orig;
    // We need to pack the Huffman tree in 'tree_depth_limit' bits.
    // So, we try by faking histogram entries to be at least 'count_min'.
    int idx = 0;
    int j;
    for (j = 0; j < histogram_size; ++j) {
      if (histogram[j] != 0) {
        const int count =
            (histogram[j] < count_min) ? count_min : histogram[j];
        tree[idx].total_count_ = count;
        tree[idx].value_ = j;
        tree[idx].pool_index_left_ = -1;
        tree[idx].pool_index_right_ = -1;
        ++idx;
      }
    }

    // Build the Huffman tree.
    qsort(tree, tree_size, sizeof(*tree), CompareHuffmanTrees);

    if (tree_size > 1) {  // Normal case.
      int tree_pool_size = 0;
      while (tree_size > 1) {  // Finish when we have only one root.
        int count;
        tree_pool[tree_pool_size++] = tree[tree_size - 1];
        tree_pool[tree_pool_size++] = tree[tree_size - 2];
        count = tree_pool[tree_pool_size - 1].total_count_ +
                tree_pool[tree_pool_size - 2].total_count_;
        tree_size -= 2;
        {
          // Search for the insertion point.
          int k;
          for (k = 0; k < tree_size; ++k) {
            if (tree[k].total_count_ <= count) break;
          }
          memmove(tree + (k + 1), tree + k, (tree_size - k) * sizeof(*tree));
          tree[k].total_count_ = count;
          tree[k].value_ = -1;
          tree[k].pool_index_left_ = tree_pool_size - 1;
          tree[k].pool_index_right_ = tree_pool_size - 2;
          tree_size = tree_size + 1;
        }
      }
      SetBitDepths(&tree[0], tree_pool, bit_depths, 0);
    } else if (tree_size == 1) {  // Trivial case: only one element.
      bit_depths[tree[0].value_] = 1;
    }

    {
      // Test if this Huffman tree satisfies our 'tree_depth_limit' criteria.
      int max_depth = bit_depths[0];
      for (j = 1; j < histogram_size; ++j) {
        if (max_depth < bit_depths[j]) {
          max_depth = bit_depths[j];
        }
      }
      if (max_depth <= tree_depth_limit) {
        break;
      }
    }
  }
  free(tree);
  return 1;
}
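// Worked example (values chosen for illustration): with tree_depth_limit = 4
// and histogram {1, 1, 2, 4, 8, 16, 32}, an unconstrained Huffman tree is a
// chain of depth 6. Doubling count_min (1, 2, 4, ...) flattens the faked
// counts until the deepest leaf fits in tree_depth_limit bits; the code gets
// slightly longer on average but stays within the format's depth limit.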
// -----------------------------------------------------------------------------
// Coding of the Huffman tree values
static HuffmanTreeToken* CodeRepeatedValues(int repetitions,
                                            HuffmanTreeToken* tokens,
                                            int value, int prev_value) {
  assert(value <= MAX_ALLOWED_CODE_LENGTH);
  if (value != prev_value) {
    tokens->code = value;
    tokens->extra_bits = 0;
    ++tokens;
    --repetitions;
  }
  while (repetitions >= 1) {
    if (repetitions < 3) {
      int i;
      for (i = 0; i < repetitions; ++i) {
        tokens->code = value;
        tokens->extra_bits = 0;
        ++tokens;
      }
      break;
    } else if (repetitions < 7) {
      tokens->code = 16;
      tokens->extra_bits = repetitions - 3;
      ++tokens;
      break;
    } else {
      tokens->code = 16;
      tokens->extra_bits = 3;
      ++tokens;
      repetitions -= 6;
    }
  }
  return tokens;
}
static HuffmanTreeToken* CodeRepeatedZeros(int repetitions,
                                           HuffmanTreeToken* tokens) {
  while (repetitions >= 1) {
    if (repetitions < 3) {
      int i;
      for (i = 0; i < repetitions; ++i) {
        tokens->code = 0;   // 0-value
        tokens->extra_bits = 0;
        ++tokens;
      }
      break;
    } else if (repetitions < 11) {
      tokens->code = 17;
      tokens->extra_bits = repetitions - 3;
      ++tokens;
      break;
    } else if (repetitions < 139) {
      tokens->code = 18;
      tokens->extra_bits = repetitions - 11;
      ++tokens;
      break;
    } else {
      tokens->code = 18;
      tokens->extra_bits = 0x7f;  // 138 repeated 0s
      ++tokens;
      repetitions -= 138;
    }
  }
  return tokens;
}
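// Worked example (values chosen for illustration): a run of 25 zero code
// lengths becomes the single token {code 18, extra_bits 14}, decoded as
// 11 + 14 = 25 zeros; a run of 140 zeros becomes {18, 0x7f} (138 zeros)
// followed by two literal {0, 0} tokens.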
int VP8LCreateCompressedHuffmanTree(const HuffmanTreeCode* const tree,
                                    HuffmanTreeToken* tokens, int max_tokens) {
  HuffmanTreeToken* const starting_token = tokens;
  HuffmanTreeToken* const ending_token = tokens + max_tokens;
  const int depth_size = tree->num_symbols;
  int prev_value = 8;  // 8 is the initial value for rle.
  int i = 0;
  assert(tokens != NULL);
  while (i < depth_size) {
    const int value = tree->code_lengths[i];
    int k = i + 1;
    int runs;
    while (k < depth_size && tree->code_lengths[k] == value) ++k;
    runs = k - i;
    if (value == 0) {
      tokens = CodeRepeatedZeros(runs, tokens);
    } else {
      tokens = CodeRepeatedValues(runs, tokens, value, prev_value);
      prev_value = value;
    }
    i += runs;
    assert(tokens <= ending_token);
  }
  (void)ending_token;    // suppress 'unused variable' warning
  return (int)(tokens - starting_token);
}
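// Worked example (values chosen for illustration): for code_lengths
// {3, 3, 3, 3, 3, 0, 0, 0, 0, 2} the emitted tokens are
//   {3, 0}    literal length 3
//   {16, 1}   repeat previous length 3 + 1 = 4 more times
//   {17, 1}   3 + 1 = 4 zeros
//   {2, 0}    literal length 2
// i.e. ten code lengths compressed into four tokens.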
// -----------------------------------------------------------------------------

// Pre-reversed 4-bit values.
static const uint8_t kReversedBits[16] = {
  0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
  0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
};
static uint32_t ReverseBits(int num_bits, uint32_t bits) {
  uint32_t retval = 0;
  int i = 0;
  while (i < num_bits) {
    i += 4;
    retval |= kReversedBits[bits & 0xf] << (MAX_ALLOWED_CODE_LENGTH + 1 - i);
    bits >>= 4;
  }
  retval >>= (MAX_ALLOWED_CODE_LENGTH + 1 - num_bits);
  return retval;
}
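// Worked example: ReverseBits(3, 0x6) handles one nibble; kReversedBits[0x6]
// (= 0x6) is shifted to bit offset 16 - 4 = 12 and the final shift by
// 16 - 3 = 13 leaves 0x3, i.e. binary 110 reversed to 011. The intermediate
// register is MAX_ALLOWED_CODE_LENGTH + 1 = 16 bits wide.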
// Get the actual bit values for a tree of bit depths.
static void ConvertBitDepthsToSymbols(HuffmanTreeCode* const tree) {
  // 0 bit-depth means that the symbol does not exist.
  int i;
  int len;
  uint32_t code = 0;
  uint32_t next_code[MAX_ALLOWED_CODE_LENGTH + 1];
  int depth_count[MAX_ALLOWED_CODE_LENGTH + 1] = { 0 };

  assert(tree != NULL);
  len = tree->num_symbols;
  for (i = 0; i < len; ++i) {
    const int code_length = tree->code_lengths[i];
    assert(code_length <= MAX_ALLOWED_CODE_LENGTH);
    ++depth_count[code_length];
  }
  depth_count[0] = 0;  // ignore unused symbol
  next_code[0] = 0;
  for (i = 1; i <= MAX_ALLOWED_CODE_LENGTH; ++i) {
    code = (code + depth_count[i - 1]) << 1;
    next_code[i] = code;
  }
  for (i = 0; i < len; ++i) {
    const int code_length = tree->code_lengths[i];
    tree->codes[i] = ReverseBits(code_length, next_code[code_length]++);
  }
}
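// Worked example (values chosen for illustration): for code lengths
// {2, 1, 3, 3}, next_code becomes 0 for length 1, 2 for length 2 and 6 for
// length 3, so the canonical codes are 10, 0, 110, 111. They are stored
// bit-reversed (01, 0, 011, 111) because the VP8L bitstream is written
// least-significant-bit first.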
// -----------------------------------------------------------------------------

int VP8LCreateHuffmanTree(int* const histogram, int tree_depth_limit,
                          HuffmanTreeCode* const tree) {
  const int num_symbols = tree->num_symbols;
  if (!OptimizeHuffmanForRle(num_symbols, histogram)) {
    return 0;
  }
  if (!GenerateOptimalTree(histogram, num_symbols,
                           tree_depth_limit, tree->code_lengths)) {
    return 0;
  }
  // Create the actual bit codes for the bit lengths.
  ConvertBitDepthsToSymbols(tree);
  return 1;
}
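// Example usage (sketch only; array sizes, field types and error handling
// are illustrative, see huffman_encode.h for the actual declarations):
//
//   int histogram[280] = { /* populated by the encoder */ };
//   uint8_t lengths[280] = { 0 };
//   uint16_t codes[280];
//   HuffmanTreeCode code;
//   code.num_symbols = 280;
//   code.code_lengths = lengths;
//   code.codes = codes;
//   if (!VP8LCreateHuffmanTree(histogram, MAX_ALLOWED_CODE_LENGTH, &code)) {
//     // Out of memory.
//   }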