/* vi: set sw=4 ts=4: */
/*
 * Gzip implementation for busybox
 *
 * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly.
 *
 * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com>
 * "this is a stripped down version of gzip I put into busybox, it does
 * only standard in to standard out with -9 compression. It also requires
 * the zcat module for some important functions."
 *
 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support
 * files as well as stdin/stdout, and to generally behave itself wrt
 * command line handling.
 *
 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
 */
/* TODO: full support for -v for DESKTOP
 * "/usr/bin/gzip -v a bogus aa" should say:
a: 85.1% -- replaced with a.gz
gzip: bogus: No such file or directory
aa: 85.1% -- replaced with aa.gz
*/
//config:config GZIP
//config:	bool "gzip (17 kb)"
//config:	help
//config:	gzip is used to compress files.
//config:	It's probably the most widely used UNIX compression program.
//config:
//config:config FEATURE_GZIP_LONG_OPTIONS
//config:	bool "Enable long options"
//config:	depends on GZIP && LONG_OPTS
//config:
//config:config GZIP_FAST
//config:	int "Trade memory for speed (0:small,slow - 2:fast,big)"
//config:	depends on GZIP
//config:	help
//config:	Enable big memory options for gzip.
//config:	0: small buffers, small hash-tables
//config:	1: larger buffers, larger hash-tables
//config:	2: larger buffers, largest hash-tables
//config:	Larger models may give slightly better compression
//config:
//config:config FEATURE_GZIP_LEVELS
//config:	bool "Enable compression levels"
//config:	depends on GZIP
//config:	help
//config:	Enable support for compression levels 4-9. The default level
//config:	is 6. If levels 1-3 are specified, 4 is used.
//config:	If this option is not selected, -N options are ignored and -6
//config:	is used.
//config:
//config:config FEATURE_GZIP_DECOMPRESS
//config:	bool "Enable decompression"
//config:	depends on GZIP || GUNZIP || ZCAT
//config:	help
//config:	Enable -d (--decompress) and -t (--test) options for gzip.
//config:	This will be automatically selected if gunzip or zcat is
//config:	enabled.
//applet:IF_GZIP(APPLET(gzip, BB_DIR_BIN, BB_SUID_DROP))

//kbuild:lib-$(CONFIG_GZIP) += gzip.o

#include "bb/lib/public.h"
#include "bb/archival/libarchive/public.h"
/* ===========================================================================
 */

/* Diagnostic functions */
#ifdef DEBUG
BB_STATIC int bb_archival_gzip_verbose;
# define Assert(cond,msg) { if (!(cond)) bb_simple_error_msg(msg); }
# define Trace(x) fprintf x
# define Tracev(x) {if (bb_archival_gzip_verbose) fprintf x; }
# define Tracevv(x) {if (bb_archival_gzip_verbose > 1) fprintf x; }
# define Tracec(c,x) {if (bb_archival_gzip_verbose && (c)) fprintf x; }
# define Tracecv(c,x) {if (bb_archival_gzip_verbose > 1 && (c)) fprintf x; }
#else
# define Assert(cond,msg)
# define Trace(x)
# define Tracev(x)
# define Tracevv(x)
# define Tracec(c,x)
# define Tracecv(c,x)
#endif
/* ===========================================================================
 */
#if BB_CFG_GZIP_FAST == 0
# define SMALL_MEM
#elif BB_CFG_GZIP_FAST == 1
# define MEDIUM_MEM
#elif BB_CFG_GZIP_FAST == 2
# define BIG_MEM
#else
# error "Invalid BB_CFG_GZIP_FAST value"
#endif

#ifndef INBUFSIZ
# ifdef SMALL_MEM
#  define INBUFSIZ 0x2000 /* input buffer size */
# else
#  define INBUFSIZ 0x8000 /* input buffer size */
# endif
#endif

#ifndef OUTBUFSIZ
# ifdef SMALL_MEM
#  define OUTBUFSIZ 8192 /* output buffer size */
# else
#  define OUTBUFSIZ 16384 /* output buffer size */
# endif
#endif

#ifndef DIST_BUFSIZE
# ifdef SMALL_MEM
#  define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */
# else
#  define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */
# endif
#endif
/* gzip flag byte */
#define ASCII_FLAG   0x01 /* bit 0 set: file probably ascii text */
#define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */
#define EXTRA_FIELD  0x04 /* bit 2 set: extra field present */
#define ORIG_NAME    0x08 /* bit 3 set: original file name present */
#define COMMENT      0x10 /* bit 4 set: file comment present */
#define RESERVED     0xC0 /* bit 6,7: reserved */

/* internal file attribute */
#define UNKNOWN 0xffff

#ifndef WSIZE
# define WSIZE 0x8000 /* window size--must be a power of two, and */
#endif               /* at least 32K for zip's deflate method */
#define MIN_MATCH 3
#define MAX_MATCH 258
/* The minimum and maximum match lengths */

#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define MAX_DIST (WSIZE-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
 * distances are limited to MAX_DIST instead of WSIZE.
 */
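
/* Worked example (added for clarity, assuming the default values above):
 * WSIZE = 0x8000 = 32768, MIN_MATCH = 3 and MAX_MATCH = 258, so
 * MIN_LOOKAHEAD = 258 + 3 + 1 = 262 and MAX_DIST = 32768 - 262 = 32506.
 * A match may therefore refer at most 32506 bytes back, slightly less than
 * the 32K window the deflate format itself would allow.
 */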
#ifndef MAX_PATH_LEN
# define MAX_PATH_LEN 1024 /* max pathname length */
#endif

#define seekable()    0 /* force sequential output */
#define translate_eol 0 /* no option -a yet */

#define BITS 16 /* maximum number of bits per code */
#define INIT_BITS 9 /* Initial number of bits per code */

#define BIT_MASK 0x1f /* Mask for 'number of compression bits' */
/* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free.
 * It's a pity that old uncompress does not check bit 0x20. That makes
 * extension of the format actually undesirable because old compress
 * would just crash on the new format instead of giving a meaningful
 * error message. It does check the number of bits, but it's more
 * helpful to say "unsupported format, get a new version" than
 * "can only handle 16 bits".
 */

#ifdef MAX_EXT_CHARS
# define MAX_SUFFIX MAX_EXT_CHARS
#else
# define MAX_SUFFIX 30
#endif
/* ===========================================================================
 * Compile with MEDIUM_MEM to reduce the memory requirements or
 * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the
 * entire input file can be held in memory (not possible on 16 bit systems).
 * Warning: defining these symbols affects HASH_BITS (see below) and thus
 * affects the compression ratio. The compressed output
 * is still correct, and might even be smaller in some cases.
 */
#ifdef SMALL_MEM
# define HASH_BITS 13 /* Number of bits used to hash strings */
#endif
#ifdef MEDIUM_MEM
# define HASH_BITS 14
#endif
#ifndef HASH_BITS
# define HASH_BITS 15
/* For portability to 16 bit machines, do not use values above 15. */
#endif

#define HASH_SIZE (unsigned)(1<<HASH_BITS)
#define HASH_MASK (HASH_SIZE-1)
#define WMASK     (WSIZE-1)
/* HASH_SIZE and WSIZE must be powers of two */

#ifndef TOO_FAR
# define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
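
/* Worked example (added for clarity): with GZIP_FAST=0 (SMALL_MEM) HASH_BITS
 * is 13, so HASH_SIZE = 1<<13 = 8192 and HASH_MASK = 0x1fff; with GZIP_FAST=2
 * HASH_BITS is 15, giving HASH_SIZE = 32768 and HASH_MASK = 0x7fff.
 * WMASK is always WSIZE-1 = 0x7fff.
 */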
/* ===========================================================================
 * These types are not really 'char', 'short' and 'long'
 */
typedef uint8_t uch;
typedef uint16_t ush;
typedef uint32_t ulg;
typedef int32_t lng;

typedef ush Pos;
#define IPos unsigned
/* A Pos is an index in the character window. We use short instead of int to
 * save space in the various tables. IPos is used only for parameter passing.
 */

enum {
	BB_ARCHIVAL_GZIP_WINDOW_SIZE = 2 * WSIZE,
	/* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the
	 * input file length plus MIN_LOOKAHEAD.
	 */
};
struct bb_archival_gzip_globals {
/* =========================================================================== */
/* global buffers, allocated once */

#define DECLARE(type, array, size) \
	type * array
#define ALLOC(type, array, size) \
	array = bb_xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type))
#define FREE(array) \
	do { free(array); array = NULL; } while (0)

	/* buffer for literals or lengths */
	/* DECLARE(uch, l_buf, LIT_BUFSIZE); */
	DECLARE(uch, l_buf, INBUFSIZ);

	DECLARE(ush, d_buf, DIST_BUFSIZE);
	DECLARE(uch, outbuf, OUTBUFSIZ);

/* Sliding window. Input bytes are read into the second half of the window,
 * and move to the first half later to keep a dictionary of at least WSIZE
 * bytes. With this organization, matches are limited to a distance of
 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
 * performed with a length multiple of the block size. Also, it limits
 * the window size to 64K, which is quite useful on MSDOS.
 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
 * be less efficient).
 */
	DECLARE(uch, window, 2L * WSIZE);

/* Link to older string with same hash index. To limit the size of this
 * array to 64K, this link is maintained only for the last 32K strings.
 * An index in this array is thus a window index modulo 32K.
 */
	/* DECLARE(Pos, prev, WSIZE); */
	DECLARE(ush, prev, 1L << BITS);

/* Heads of the hash chains or 0. */
	/* DECLARE(Pos, head, 1<<HASH_BITS); */
#define head (G1.prev + WSIZE) /* hash head (see deflate.c) */

	unsigned comp_level_minus4; /* can be a byte */
	unsigned max_chain_length;
	unsigned max_lazy_match;
	unsigned good_match;
	unsigned nice_match;
#define comp_level_minus4 (G1.comp_level_minus4)
#define max_chain_length  (G1.max_chain_length)
#define max_lazy_match    (G1.max_lazy_match)
#define good_match        (G1.good_match)
#define nice_match        (G1.nice_match)
/* =========================================================================== */
/* all members below are zeroed out in pack_gzip() for each next file */

	uint32_t crc; /* shift register contents */
	/*uint32_t *crc_32_tab;*/

/* window position at the beginning of the current output block. Gets
 * negative when the window is moved backwards.
 */
	lng block_start;

	unsigned ins_h; /* hash index of string to be inserted */

/* Number of bits by which ins_h and del_h must be shifted at each
 * input step. It must be such that after MIN_MATCH steps, the oldest
 * byte no longer takes part in the hash key, that is:
 * H_SHIFT * MIN_MATCH >= HASH_BITS
 */
#define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)

/* Length of the best match at previous step. Matches not greater than this
 * are discarded. This is used in the lazy match evaluation.
 */
	unsigned prev_length;

	unsigned strstart;    /* start of string to insert */
	unsigned match_start; /* start of matching string */
	unsigned lookahead;   /* number of valid bytes ahead in window */

	/* number of input bytes */
	ulg isize; /* only 32 bits stored in .gz file */

	/* bbox always use stdin/stdout */
#define ifd STDIN_FILENO  /* input file descriptor */
#define ofd STDOUT_FILENO /* output file descriptor */

	unsigned insize; /* valid bytes in l_buf */
	unsigned outcnt; /* bytes in output buffer */
	uint8_t eofile;  /* flag set at end of input file */

/* ===========================================================================
 * Local data used by the "bit string" routines.
 */

/* Output buffer. bits are inserted starting at the bottom (least significant
 * bits).
 */
	unsigned bi_buf; /* was unsigned short */

#define BUF_SIZE (int)(8 * sizeof(G1.bi_buf))

/* Number of bits used within bi_buf. (bi_buf might be implemented on
 * more than 16 bits on some systems.)
 */
	int bi_valid;

#ifdef DEBUG
	ulg bits_sent; /* bit length of the compressed data */
# define DEBUG_bits_sent(v) (void)(G1.bits_sent v)
#else
# define DEBUG_bits_sent(v) ((void)0)
#endif
};

#define G1ptr ((struct bb_archival_gzip_globals*)(bb_ptr_to_globals) - 1)
#define G1 (*G1ptr)
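
/* Layout note (added; an assumption based on G1ptr above and G2ptr further
 * down, not something stated in this file): the two globals structures are
 * laid out back to back, bb_ptr_to_globals points at the second one
 * (struct bb_archival_gzip_globals2), and this structure sits immediately
 * before it, hence the "- 1".
 */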
/* ===========================================================================
 * Write the output buffer outbuf[0..outcnt-1] and update bytes_out.
 * (used for the compressed data only)
 */
BB_STATIC void bb_archival_gzip_flush_outbuf(void)
{
	if (G1.outcnt == 0)
		return;

	bb_xwrite(ofd, (char *) G1.outbuf, G1.outcnt);
	G1.outcnt = 0;
}

/* ===========================================================================
 */
/* put_8bit is used for the compressed output */
#define bb_archival_gzip_put_8bit(c) \
do { \
	G1.outbuf[G1.outcnt++] = (c); \
	if (G1.outcnt == OUTBUFSIZ) \
		bb_archival_gzip_flush_outbuf(); \
} while (0)
/* Output a 16 bit value, lsb first */
BB_STATIC void bb_archival_gzip_put_16bit(ush w)
{
	/* GCC 4.2.1 won't optimize out redundant loads of G1.outcnt
	 * (probably because of fear of aliasing with G1.outbuf[]
	 * stores), do it explicitly:
	 */
	unsigned outcnt = G1.outcnt;
	uch *dst = &G1.outbuf[outcnt];

	if (outcnt < OUTBUFSIZ-2) {
		/* Common case */
		ush *dst16 = (void*) dst;
		*dst16 = w; /* unaligned LSB 16-bit store */
		G1.outcnt = outcnt + 2;
		return;
	}

	*dst = (uch)w;
	w >>= 8;
	G1.outcnt = ++outcnt;

	/* Slowpath: we will need to do flush_outbuf() */
	if (outcnt == OUTBUFSIZ)
		bb_archival_gzip_flush_outbuf(); /* here */
	bb_archival_gzip_put_8bit(w); /* or here */
}
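
/* For example (added for clarity): put_16bit(0x1234) stores the byte 0x34
 * followed by 0x12, i.e. values are emitted least significant byte first,
 * which is the byte order the gzip/deflate format expects.
 */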
#define OPTIMIZED_PUT_32BIT (BB_CFG_GZIP_FAST > 0)
BB_STATIC void bb_archival_gzip_put_32bit(ulg n)
{
#if OPTIMIZED_PUT_32BIT
	unsigned outcnt = G1.outcnt;
	if (outcnt < OUTBUFSIZ-4) {
		/* Common case */
		ulg *dst32 = (void*) &G1.outbuf[outcnt];
		*dst32 = n; /* unaligned LSB 32-bit store */
		//bb_error_msg("%p", dst32); // store alignment debugging
		G1.outcnt = outcnt + 4;
		return;
	}
#endif
	bb_archival_gzip_put_16bit(n);
	bb_archival_gzip_put_16bit(n >> 16);
}

BB_STATIC void bb_archival_gzip_flush_outbuf_if_32bit_optimized(void)
{
#if OPTIMIZED_PUT_32BIT
	/* If put_32bit() performs 32bit stores && it is used in send_bits() */
	if (BUF_SIZE == 32)
		bb_archival_gzip_flush_outbuf();
#endif
}
/* ===========================================================================
 * Run a set of bytes through the crc shift register. If s is a NULL
 * pointer, then initialize the crc shift register contents instead.
 * Return the current crc in either case.
 */
BB_STATIC void bb_archival_gzip_updcrc(uch *s, unsigned n)
{
	G1.crc = bb_crc32_block_endian0(G1.crc, s, n, bb_global_crc32_table /*G1.crc_32_tab*/);
}
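
/* Usage note (an assumption based on the usual gzip CRC-32 convention, not on
 * code shown in this excerpt): G1.crc is expected to be seeded with all ones
 * before the first update, and the final value is stored bit-inverted in the
 * gzip trailer.
 */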
/* ===========================================================================
 * Read a new buffer from the current input file, perform end-of-line
 * translation, and update the crc and input file size.
 * IN assertion: size >= 2 (for end-of-line translation)
 */
BB_STATIC unsigned bb_archival_gzip_file_read(void *buf, unsigned size)
{
	unsigned len;

	Assert(G1.insize == 0, "l_buf not empty");

	len = bb_safe_read(ifd, buf, size);
	if (len == (unsigned)(-1) || len == 0)
		return len;

	bb_archival_gzip_updcrc(buf, len);
	G1.isize += len;
	return len;
}
/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
BB_STATIC void bb_archival_gzip_send_bits(unsigned value, unsigned length)
{
	unsigned new_buf;

	Tracev((stderr, " l %2d v %4x ", length, value));
	Assert(length > 0 && length <= 15, "invalid length");
	DEBUG_bits_sent(+= length);

	BB_BUILD_BUG_ON(BUF_SIZE != 32 && BUF_SIZE != 16);

	new_buf = G1.bi_buf | (value << G1.bi_valid);
	/* NB: the above may sometimes do "<< 32" shift (undefined)
	 * if check below is changed to "length > BUF_SIZE" instead of >= */
	length += G1.bi_valid;

	/* If bi_buf is full */
	if (length >= BUF_SIZE) {
		/* ...use (valid) bits from bi_buf and
		 * (BUF_SIZE - bi_valid) bits from value,
		 * leaving (width - (BUF_SIZE-bi_valid)) unused bits in value.
		 */
		value >>= (BUF_SIZE - G1.bi_valid);
		if (BUF_SIZE == 32) {
			bb_archival_gzip_put_32bit(new_buf);
		} else { /* 16 */
			bb_archival_gzip_put_16bit(new_buf);
		}
		/* the leftover bits of "value" become the new bi_buf */
		new_buf = value;
		length -= BUF_SIZE;
	}
	G1.bi_buf = new_buf;
	G1.bi_valid = length;
}
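
/* Worked example (added for clarity): with BUF_SIZE == 32, bi_valid == 29 and
 * 29 bits already in bi_buf, send_bits(v, 5) forms new_buf = bi_buf | (v<<29),
 * sees 29+5 >= 32, emits the full 32-bit word with put_32bit(), and keeps the
 * top 5-(32-29) = 2 bits of v as the new bi_buf with bi_valid = 2.
 */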
/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
BB_STATIC unsigned bb_archival_gzip_bi_reverse(unsigned code, int len)
{
	unsigned res = 0;

	while (1) {
		res |= code & 1;
		if (--len <= 0) return res;
		code >>= 1;
		res <<= 1;
	}
}
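
/* For example (added for clarity): bi_reverse(0x06, 5) == 0x0c, i.e. binary
 * 00110 becomes 01100: bit 0 of the input ends up in bit len-1 of the result.
 */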
/* ===========================================================================
 * Write out any remaining bits in an incomplete byte.
 */
BB_STATIC void bb_archival_gzip_bi_windup(void)
{
	unsigned bits = G1.bi_buf;
	int cnt = G1.bi_valid;

	while (cnt > 0) {
		bb_archival_gzip_put_8bit(bits);
		bits >>= 8;
		cnt -= 8;
	}
	G1.bi_buf = 0;
	G1.bi_valid = 0;
	DEBUG_bits_sent(= (G1.bits_sent + 7) & ~7);
}
/* ===========================================================================
 * Copy a stored block to the zip file, storing first the length and its
 * one's complement if requested.
 */
BB_STATIC void bb_archival_gzip_copy_block(const char *buf, unsigned len, int header)
{
	bb_archival_gzip_bi_windup(); /* align on byte boundary */

	if (header) {
		unsigned v = ((uint16_t)len) | ((~len) << 16);
		bb_archival_gzip_put_32bit(v);
		DEBUG_bits_sent(+= 2 * 16);
	}
	DEBUG_bits_sent(+= (ulg) len << 3);
	while (len--) {
		bb_archival_gzip_put_8bit(*buf++);
	}
	/* The above can 32-bit misalign outbuf */
	if (G1.outcnt & 3) /* syscalls are expensive, is it really misaligned? */
		bb_archival_gzip_flush_outbuf_if_32bit_optimized();
}
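
/* Worked example (added for clarity): copy_block(buf, 5, 1) byte-aligns the
 * output, then emits the 32-bit word 0xfffa0005 LSB-first, i.e. LEN = 0x0005
 * followed by its one's complement 0xfffa, then the 5 raw bytes. This is the
 * framing deflate uses for stored (uncompressed) blocks.
 */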
/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead, and sets eofile if end of input file.
 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
 * OUT assertions: at least one byte has been read, or eofile is set;
 * file reads are performed for at least two bytes (required for the
 * translate_eol option).
 */
BB_STATIC void bb_archival_gzip_fill_window(void)
{
	unsigned n, m;
	unsigned more = BB_ARCHIVAL_GZIP_WINDOW_SIZE - G1.lookahead - G1.strstart;
	/* Amount of free space at the end of the window. */

	/* If the window is almost full and there is insufficient lookahead,
	 * move the upper half to the lower one to make room in the upper half.
	 */
	if (more == (unsigned) -1) {
		/* Very unlikely, but possible on 16 bit machine if strstart == 0
		 * and lookahead == 1 (input done one byte at time)
		 */
		more--;
	} else if (G1.strstart >= WSIZE + MAX_DIST) {
		/* By the IN assertion, the window is not empty so we can't confuse
		 * more == 0 with more == 64K on a 16 bit machine.
		 */
		Assert(BB_ARCHIVAL_GZIP_WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");

		memcpy(G1.window, G1.window + WSIZE, WSIZE);
		G1.match_start -= WSIZE;
		G1.strstart -= WSIZE; /* we now have strstart >= MAX_DIST: */

		G1.block_start -= WSIZE;

		for (n = 0; n < HASH_SIZE; n++) {
			m = head[n];
			head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
		}
		for (n = 0; n < WSIZE; n++) {
			m = G1.prev[n];
			G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
			/* If n is not on any hash chain, prev[n] is garbage but
			 * its value will never be used.
			 */
		}
		more += WSIZE;
	}
	/* At this point, more >= 2 */
	if (!G1.eofile) {
		n = bb_archival_gzip_file_read(G1.window + G1.strstart + G1.lookahead, more);
		if (n == 0 || n == (unsigned) -1) {
			G1.eofile = 1;
		} else {
			G1.lookahead += n;
		}
	}
}

/* Both users fill window with the same loop: */
BB_STATIC void bb_archival_gzip_fill_window_if_needed(void)
{
	while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
		bb_archival_gzip_fill_window();
}
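
/* Worked example (added for clarity): with WSIZE = 0x8000 and MAX_DIST =
 * 32506, the sliding branch above triggers once strstart reaches 65274. The
 * upper 32K of the window is copied down, strstart/match_start/block_start
 * are reduced by 32768, and every hash head and prev link is rebased by 32768
 * or reset to 0 if it pointed into the discarded half.
 */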
626 /* ===========================================================================
627 * Set match_start to the longest match starting at the given string and
628 * return its length. Matches shorter or equal to prev_length are discarded,
629 * in which case the result is equal to prev_length and match_start is
631 * IN assertions: cur_match is the head of the hash chain for the current
632 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
635 /* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
636 * match.s. The code is functionally equivalent, so you can use the C version
639 BB_STATIC
int bb_archival_gzip_longest_match(IPos cur_match
)
641 unsigned chain_length
= max_chain_length
; /* max hash chain length */
642 uch
*scan
= G1
.window
+ G1
.strstart
; /* current string */
643 uch
*match
; /* matched string */
644 int len
; /* length of current match */
645 int best_len
= G1
.prev_length
; /* best match length so far */
646 IPos limit
= G1
.strstart
> (IPos
) MAX_DIST
? G1
.strstart
- (IPos
) MAX_DIST
: 0;
647 /* Stop when cur_match becomes <= limit. To simplify the code,
648 * we prevent matches with the string of window index 0.
651 /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
652 * It is easy to get rid of this optimization if necessary.
654 #if HASH_BITS < 8 || MAX_MATCH != 258
655 # error Code too clever
657 uch
*strend
= G1
.window
+ G1
.strstart
+ MAX_MATCH
;
658 uch scan_end1
= scan
[best_len
- 1];
659 uch scan_end
= scan
[best_len
];
661 /* Do not waste too much time if we already have a good match: */
662 if (G1
.prev_length
>= good_match
) {
665 Assert(G1
.strstart
<= BB_ARCHIVAL_GZIP_WINDOW_SIZE
- MIN_LOOKAHEAD
, "insufficient lookahead");
668 Assert(cur_match
< G1
.strstart
, "no future");
669 match
= G1
.window
+ cur_match
;
671 /* Skip to next match if the match length cannot increase
672 * or if the match length is less than 2:
674 if (match
[best_len
] != scan_end
675 || match
[best_len
- 1] != scan_end1
676 || *match
!= *scan
|| *++match
!= scan
[1]
681 /* The check at best_len-1 can be removed because it will be made
682 * again later. (This heuristic is not always a win.)
683 * It is not necessary to compare scan[2] and match[2] since they
684 * are always equal when the other bytes match, given that
685 * the hash keys are equal and that HASH_BITS >= 8.
689 /* We check for insufficient lookahead only every 8th comparison;
690 * the 256th check will be made at strstart+258.
693 } while (*++scan
== *++match
&& *++scan
== *++match
&&
694 *++scan
== *++match
&& *++scan
== *++match
&&
695 *++scan
== *++match
&& *++scan
== *++match
&&
696 *++scan
== *++match
&& *++scan
== *++match
&& scan
< strend
);
698 len
= MAX_MATCH
- (int) (strend
- scan
);
699 scan
= strend
- MAX_MATCH
;
701 if (len
> best_len
) {
702 G1
.match_start
= cur_match
;
704 if (len
>= nice_match
)
706 scan_end1
= scan
[best_len
- 1];
707 scan_end
= scan
[best_len
];
709 } while ((cur_match
= G1
.prev
[cur_match
& WMASK
]) > limit
710 && --chain_length
!= 0);
#ifdef DEBUG
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
BB_STATIC void bb_archival_gzip_check_match(IPos start, IPos match, int length)
{
	/* check that the match is indeed a match */
	if (memcmp(G1.window + match, G1.window + start, length) != 0) {
		bb_error_msg(" start %d, match %d, length %d", start, match, length);
		bb_simple_error_msg("invalid match");
	}
	if (bb_archival_gzip_verbose > 1) {
		bb_error_msg("\\[%d,%d]", start - match, length);
		do {
			bb_putchar_stderr(G1.window[start++]);
		} while (--length != 0);
	}
}
#else
# define bb_archival_gzip_check_match(start, match, length) ((void)0)
#endif
736 /* trees.c -- output deflated data using Huffman coding
737 * Copyright (C) 1992-1993 Jean-loup Gailly
738 * This is free software; you can redistribute it and/or modify it under the
739 * terms of the GNU General Public License, see the file COPYING.
743 * Encode various sets of source values using variable-length
747 * The PKZIP "deflation" process uses several Huffman trees. The more
748 * common source values are represented by shorter bit sequences.
750 * Each code tree is stored in the ZIP file in a compressed form
751 * which is itself a Huffman encoding of the lengths of
752 * all the code strings (in ascending order by source values).
753 * The actual code strings are reconstructed from the lengths in
754 * the UNZIP process, as described in the "application note"
755 * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program.
759 * Data Compression: Techniques and Applications, pp. 53-55.
760 * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7.
763 * Data Compression: Methods and Theory, pp. 49-50.
764 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
768 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
772 * Allocate the match buffer, initialize the various tables [and save
773 * the location of the internal file attribute (ascii/binary) and
774 * method (DEFLATE/STORE) -- deleted in bbox]
776 * void ct_tally(int dist, int lc);
777 * Save the match info and tally the frequency counts.
779 * ulg flush_block(char *buf, ulg stored_len, int eof)
780 * Determine the best encoding for the current block: dynamic trees,
781 * static trees or store, and output the encoded block to the zip
782 * file. Returns the total compressed length for the file so far.
786 /* All codes must not exceed MAX_BITS bits */
788 #define MAX_BL_BITS 7
789 /* Bit length codes must not exceed MAX_BL_BITS bits */
791 #define LENGTH_CODES 29
792 /* number of length codes, not counting the special END_BLOCK code */
795 /* number of literal bytes 0..255 */
797 #define END_BLOCK 256
798 /* end of block literal code */
800 #define L_CODES (LITERALS+1+LENGTH_CODES)
801 /* number of Literal or Length codes, including the END_BLOCK code */
804 /* number of distance codes */
807 /* number of codes used to transfer the bit lengths */
809 /* extra bits for each length code */
810 BB_STATIC
const uint8_t bb_archival_gzip_extra_lbits
[LENGTH_CODES
] = {
811 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
815 /* extra bits for each distance code */
816 BB_STATIC
const uint8_t bb_archival_gzip_extra_dbits
[D_CODES
] = {
817 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
818 10, 10, 11, 11, 12, 12, 13, 13
821 /* extra bits for each bit length code */
822 BB_STATIC
const uint8_t bb_archival_gzip_extra_blbits
[BL_CODES
] = {
823 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };
825 /* number of codes at each bit length for an optimal tree */
826 BB_STATIC
const uint8_t bb_archival_gzip_bl_order
[BL_CODES
] = {
827 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
829 #define STORED_BLOCK 0
830 #define STATIC_TREES 1
832 /* The three kinds of block type */
836 # define LIT_BUFSIZE 0x2000
839 # define LIT_BUFSIZE 0x4000
841 # define LIT_BUFSIZE 0x8000
846 # define DIST_BUFSIZE LIT_BUFSIZE
848 /* Sizes of match buffers for literals/lengths and distances. There are
849 * 4 reasons for limiting LIT_BUFSIZE to 64K:
850 * - frequencies can be kept in 16 bit counters
851 * - if compression is not successful for the first block, all input data is
852 * still in the window so we can still emit a stored block even when input
853 * comes from standard input. (This can also be done for all blocks if
854 * LIT_BUFSIZE is not greater than 32K.)
855 * - if compression is not successful for a file smaller than 64K, we can
856 * even emit a stored file instead of a stored block (saving 5 bytes).
857 * - creating new Huffman trees less frequently may not provide fast
858 * adaptation to changes in the input data statistics. (Take for
859 * example a binary file with poorly compressible code followed by
860 * a highly compressible string table.) Smaller buffer sizes give
861 * fast adaptation but have of course the overhead of transmitting trees
863 * - I can't count above 4
864 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
865 * memory at the expense of compression). Some optimizations would be possible
866 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
869 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
871 /* repeat a zero length 3-10 times (3 bits of repeat count) */
872 #define REPZ_11_138 18
873 /* repeat a zero length 11-138 times (7 bits of repeat count) */
875 /* ===========================================================================
877 /* Data structure describing a single value and its code string. */
878 struct bb_archival_gzip_ct_data
{
880 ush freq
; /* frequency count */
881 ush code
; /* bit string */
884 ush dad
; /* father node in Huffman tree */
885 ush len
; /* length of bit string */
894 #define HEAP_SIZE (2*L_CODES + 1)
895 /* maximum heap size */
897 struct bb_archival_gzip_tree_desc
{
898 struct bb_archival_gzip_ct_data
*dyn_tree
; /* the dynamic tree */
899 struct bb_archival_gzip_ct_data
*static_tree
; /* corresponding static tree or NULL */
900 const uint8_t *extra_bits
; /* extra bits for each code or NULL */
901 int extra_base
; /* base index for extra_bits */
902 int elems
; /* max number of elements in the tree */
903 int max_length
; /* max bit length for the codes */
904 int max_code
; /* largest code with non zero frequency */
907 struct bb_archival_gzip_globals2
{
909 ush heap
[HEAP_SIZE
]; /* heap used to build the Huffman trees */
910 int heap_len
; /* number of elements in the heap */
911 int heap_max
; /* element of largest frequency */
913 /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
914 * The same heap array is used to build all trees.
917 struct bb_archival_gzip_ct_data dyn_ltree
[HEAP_SIZE
]; /* literal and length tree */
918 struct bb_archival_gzip_ct_data dyn_dtree
[2 * D_CODES
+ 1]; /* distance tree */
920 struct bb_archival_gzip_ct_data static_ltree
[L_CODES
+ 2];
922 /* The static literal tree. Since the bit lengths are imposed, there is no
923 * need for the L_CODES extra codes used during heap construction. However
924 * The codes 286 and 287 are needed to build a canonical tree (see ct_init
928 struct bb_archival_gzip_ct_data static_dtree
[D_CODES
];
930 /* The static distance tree. (Actually a trivial tree since all codes use
934 struct bb_archival_gzip_ct_data bl_tree
[2 * BL_CODES
+ 1];
936 /* Huffman tree for the bit lengths */
938 struct bb_archival_gzip_tree_desc l_desc
;
939 struct bb_archival_gzip_tree_desc d_desc
;
940 struct bb_archival_gzip_tree_desc bl_desc
;
942 /* was "ush", but "unsigned" results in smaller code */
943 unsigned bl_count
[MAX_BITS
+ 1];
945 /* The lengths of the bit length codes are sent in order of decreasing
946 * probability, to avoid transmitting the lengths for unused bit length codes.
949 uch depth
[2 * L_CODES
+ 1];
951 /* Depth of each subtree used as tie breaker for trees of equal frequency */
953 uch length_code
[MAX_MATCH
- MIN_MATCH
+ 1];
955 /* length code for each normalized match length (0 == MIN_MATCH) */
959 /* distance codes. The first 256 values correspond to the distances
960 * 3 .. 258, the last 256 values correspond to the top 8 bits of
961 * the 15 bit distances.
964 int base_length
[LENGTH_CODES
];
966 /* First normalized length for each code (0 = MIN_MATCH) */
968 int base_dist
[D_CODES
];
970 /* First normalized distance for each code (0 = distance of 1) */
972 uch flag_buf
[LIT_BUFSIZE
/ 8];
974 /* flag_buf is a bit array distinguishing literals from lengths in
975 * l_buf, thus indicating the presence or absence of a distance.
978 unsigned last_lit
; /* running index in l_buf */
979 unsigned last_dist
; /* running index in d_buf */
980 unsigned last_flags
; /* running index in flag_buf */
981 uch flags
; /* current flags not yet saved in flag_buf */
982 uch flag_bit
; /* current bit used in flags */
984 /* bits are filled in flags starting at bit 0 (least significant).
985 * Note: these flags are overkill in the current code since we don't
986 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
989 ulg opt_len
; /* bit length of current block with optimal trees */
990 ulg static_len
; /* bit length of current block with static trees */
992 // ulg compressed_len; /* total bit length of compressed file */
995 #define G2ptr ((struct bb_archival_gzip_globals2*)(bb_ptr_to_globals))
/* ===========================================================================
 */
#ifndef DEBUG
/* Send a code of the given tree. c and tree must not have side effects */
# define SEND_CODE(c, tree) bb_archival_gzip_send_bits(tree[c].Code, tree[c].Len)
#else
# define SEND_CODE(c, tree) \
{ \
	if (bb_archival_gzip_verbose > 1) bb_error_msg("\ncd %3d ", (c)); \
	bb_archival_gzip_send_bits(tree[c].Code, tree[c].Len); \
}
#endif

#define D_CODE(dist) \
	((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. dist_code[256] and dist_code[257] are never
 * used.
 * The arguments must not have side effects.
 */

/* ===========================================================================
 * Initialize a new block.
 */
BB_STATIC void bb_archival_gzip_init_block(void)
{
	int n; /* iterates over tree elements */

	/* Initialize the trees. */
	for (n = 0; n < L_CODES; n++)
		G2.dyn_ltree[n].Freq = 0;
	for (n = 0; n < D_CODES; n++)
		G2.dyn_dtree[n].Freq = 0;
	for (n = 0; n < BL_CODES; n++)
		G2.bl_tree[n].Freq = 0;

	G2.dyn_ltree[END_BLOCK].Freq = 1;
	G2.opt_len = G2.static_len = 0;
	G2.last_lit = G2.last_dist = G2.last_flags = 0;
	G2.flags = 0;
	G2.flag_bit = 1;
}
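
/* Worked example (added for clarity): D_CODE() takes dist = "match distance
 * minus 1". For dist = 300 it indexes dist_code[256 + (300>>7)], i.e.
 * dist_code[258], the entry for the code covering distances 257..384; values
 * below 256 use the first half of the table directly.
 *
 * The fragment below is a rough, non-compiled sketch (the helper name is made
 * up) of how SEND_CODE()/D_CODE() combine to emit one length/distance pair,
 * without the extra bits; compress_block() further down is the real
 * implementation.
 */
#if 0
static void bb_archival_gzip_example_emit_match(unsigned dist, int lc)
{
	/* lc is "match length - MIN_MATCH", dist is "match distance - 1" */
	SEND_CODE(G2.length_code[lc] + LITERALS + 1, G2.dyn_ltree); /* length */
	SEND_CODE(D_CODE(dist), G2.dyn_dtree);                      /* distance */
}
#endif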
1041 /* ===========================================================================
1042 * Restore the heap property by moving down the tree starting at node k,
1043 * exchanging a node with the smallest of its two sons if necessary, stopping
1044 * when the heap property is re-established (each father smaller than its
1048 /* Compares to subtrees, using the tree depth as tie breaker when
1049 * the subtrees have equal frequency. This minimizes the worst case length. */
1050 #define SMALLER(tree, n, m) \
1051 (tree[n].Freq < tree[m].Freq \
1052 || (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))
1054 BB_STATIC
void bb_archival_gzip_pqdownheap(const struct bb_archival_gzip_ct_data
*tree
, int k
)
1057 int j
= k
<< 1; /* left son of k */
1059 while (j
<= G2
.heap_len
) {
1060 /* Set j to the smallest of the two sons: */
1061 if (j
< G2
.heap_len
&& SMALLER(tree
, G2
.heap
[j
+ 1], G2
.heap
[j
]))
1064 /* Exit if v is smaller than both sons */
1065 if (SMALLER(tree
, v
, G2
.heap
[j
]))
1068 /* Exchange v with the smallest son */
1069 G2
.heap
[k
] = G2
.heap
[j
];
1072 /* And continue down the tree, setting j to the left son of k */
1078 /* ===========================================================================
1079 * Compute the optimal bit lengths for a tree and update the total bit length
1080 * for the current block.
1081 * IN assertion: the fields freq and dad are set, heap[heap_max] and
1082 * above are the tree nodes sorted by increasing frequency.
1083 * OUT assertions: the field len is set to the optimal bit length, the
1084 * array bl_count contains the frequencies for each bit length.
1085 * The length opt_len is updated; static_len is also updated if stree is
1088 BB_STATIC
void bb_archival_gzip_gen_bitlen(const struct bb_archival_gzip_tree_desc
*desc
)
1090 #define tree desc->dyn_tree
1091 int h
; /* heap index */
1092 int n
, m
; /* iterate over the tree elements */
1093 int bits
; /* bit length */
1094 int overflow
; /* number of elements with bit length too large */
1096 for (bits
= 0; bits
< BB_ARRAY_SIZE(G2
.bl_count
); bits
++)
1097 G2
.bl_count
[bits
] = 0;
1099 /* In a first pass, compute the optimal bit lengths (which may
1100 * overflow in the case of the bit length tree).
1102 tree
[G2
.heap
[G2
.heap_max
]].Len
= 0; /* root of the heap */
1105 for (h
= G2
.heap_max
+ 1; h
< HEAP_SIZE
; h
++) {
1106 ulg f
; /* frequency */
1107 int xbits
; /* extra bits */
1110 bits
= tree
[tree
[n
].Dad
].Len
+ 1;
1111 if (bits
> desc
->max_length
) {
1112 bits
= desc
->max_length
;
1115 tree
[n
].Len
= (ush
) bits
;
1116 /* We overwrite tree[n].Dad which is no longer needed */
1118 if (n
> desc
->max_code
)
1119 continue; /* not a leaf node */
1121 G2
.bl_count
[bits
]++;
1123 if (n
>= desc
->extra_base
)
1124 xbits
= desc
->extra_bits
[n
- desc
->extra_base
];
1126 G2
.opt_len
+= f
* (bits
+ xbits
);
1128 if (desc
->static_tree
)
1129 G2
.static_len
+= f
* (desc
->static_tree
[n
].Len
+ xbits
);
1134 Trace((stderr
, "\nbit length overflow\n"));
1135 /* This happens for example on obj2 and pic of the Calgary corpus */
1137 /* Find the first bit length which could increase: */
1139 bits
= desc
->max_length
- 1;
1140 while (G2
.bl_count
[bits
] == 0)
1142 G2
.bl_count
[bits
]--; /* move one leaf down the tree */
1143 G2
.bl_count
[bits
+ 1] += 2; /* move one overflow item as its brother */
1144 G2
.bl_count
[desc
->max_length
]--;
1145 /* The brother of the overflow item also moves one step up,
1146 * but this does not affect bl_count[desc->max_length]
1149 } while (overflow
> 0);
1151 /* Now recompute all bit lengths, scanning in increasing frequency.
1152 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
1153 * lengths instead of fixing only the wrong ones. This idea is taken
1154 * from 'ar' written by Haruhiko Okumura.)
1156 for (bits
= desc
->max_length
; bits
!= 0; bits
--) {
1157 n
= G2
.bl_count
[bits
];
1160 if (m
> desc
->max_code
)
1162 if (tree
[m
].Len
!= (unsigned) bits
) {
1163 Trace((stderr
, "code %d bits %d->%d\n", m
, tree
[m
].Len
, bits
));
1164 G2
.opt_len
+= ((int32_t) bits
- tree
[m
].Len
) * tree
[m
].Freq
;
1173 /* ===========================================================================
1174 * Generate the codes for a given tree and bit counts (which need not be
1176 * IN assertion: the array bl_count contains the bit length statistics for
1177 * the given tree and the field len is set for all tree elements.
1178 * OUT assertion: the field code is set for all tree elements of non
1181 BB_STATIC
void bb_archival_gzip_gen_codes(struct bb_archival_gzip_ct_data
*tree
, int max_code
)
1183 /* next_code[] and code used to be "ush", but "unsigned" results in smaller code */
1184 unsigned next_code
[MAX_BITS
+ 1]; /* next code value for each bit length */
1185 unsigned code
= 0; /* running code value */
1186 int bits
; /* bit index */
1187 int n
; /* code index */
1189 /* The distribution counts are first used to generate the code values
1190 * without bit reversal.
1192 for (bits
= 1; bits
<= MAX_BITS
; bits
++) {
1193 next_code
[bits
] = code
= (code
+ G2
.bl_count
[bits
- 1]) << 1;
1195 /* Check that the bit counts in bl_count are consistent. The last code
1198 Assert(code
+ G2
.bl_count
[MAX_BITS
] - 1 == (1 << MAX_BITS
) - 1,
1199 "inconsistent bit counts");
1200 Tracev((stderr
, "\ngen_codes: max_code %d ", max_code
));
1202 for (n
= 0; n
<= max_code
; n
++) {
1203 int len
= tree
[n
].Len
;
1207 /* Now reverse the bits */
1208 tree
[n
].Code
= bb_archival_gzip_bi_reverse(next_code
[len
]++, len
);
1210 Tracec(tree
!= G2
.static_ltree
,
1211 (stderr
, "\nn %3d %c l %2d c %4x (%x) ", n
,
1212 (n
> ' ' ? n
: ' '), len
, tree
[n
].Code
,
1213 next_code
[len
] - 1));
1217 /* ===========================================================================
1218 * Construct one Huffman tree and assigns the code bit strings and lengths.
1219 * Update the total bit length for the current block.
1220 * IN assertion: the field freq is set for all tree elements.
1221 * OUT assertions: the fields len and code are set to the optimal bit length
1222 * and corresponding code. The length opt_len is updated; static_len is
1223 * also updated if stree is not null. The field max_code is set.
1226 /* Remove the smallest element from the heap and recreate the heap with
1227 * one less element. Updates heap and heap_len. */
1230 /* Index within the heap array of least frequent node in the Huffman tree */
1232 #define PQREMOVE(tree, top) \
1234 top = G2.heap[SMALLEST]; \
1235 G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
1236 bb_archival_gzip_pqdownheap(tree, SMALLEST); \
1239 BB_STATIC
void bb_archival_gzip_build_tree(struct bb_archival_gzip_tree_desc
*desc
)
1241 struct bb_archival_gzip_ct_data
*tree
= desc
->dyn_tree
;
1242 struct bb_archival_gzip_ct_data
*stree
= desc
->static_tree
;
1243 int elems
= desc
->elems
;
1244 int n
, m
; /* iterate over heap elements */
1245 int max_code
= -1; /* largest code with non zero frequency */
1246 int node
= elems
; /* next internal node of the tree */
1248 /* Construct the initial heap, with least frequent element in
1249 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
1250 * heap[0] is not used.
1253 G2
.heap_max
= HEAP_SIZE
;
1255 for (n
= 0; n
< elems
; n
++) {
1256 if (tree
[n
].Freq
!= 0) {
1257 G2
.heap
[++G2
.heap_len
] = max_code
= n
;
1264 /* The pkzip format requires that at least one distance code exists,
1265 * and that at least one bit should be sent even if there is only one
1266 * possible code. So to avoid special checks later on we force at least
1267 * two codes of non zero frequency.
1269 while (G2
.heap_len
< 2) {
1270 int new = G2
.heap
[++G2
.heap_len
] = (max_code
< 2 ? ++max_code
: 0);
1276 G2
.static_len
-= stree
[new].Len
;
1277 /* new is 0 or 1 so it does not have extra bits */
1279 desc
->max_code
= max_code
;
1281 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
1282 * establish sub-heaps of increasing lengths:
1284 for (n
= G2
.heap_len
/ 2; n
>= 1; n
--)
1285 bb_archival_gzip_pqdownheap(tree
, n
);
1287 /* Construct the Huffman tree by repeatedly combining the least two
1291 PQREMOVE(tree
, n
); /* n = node of least frequency */
1292 m
= G2
.heap
[SMALLEST
]; /* m = node of next least frequency */
1294 G2
.heap
[--G2
.heap_max
] = n
; /* keep the nodes sorted by frequency */
1295 G2
.heap
[--G2
.heap_max
] = m
;
1297 /* Create a new node father of n and m */
1298 tree
[node
].Freq
= tree
[n
].Freq
+ tree
[m
].Freq
;
1299 G2
.depth
[node
] = BB_MAX(G2
.depth
[n
], G2
.depth
[m
]) + 1;
1300 tree
[n
].Dad
= tree
[m
].Dad
= (ush
) node
;
1302 if (tree
== G2
.bl_tree
) {
1303 bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
1304 node
, tree
[node
].Freq
, n
, tree
[n
].Freq
, m
, tree
[m
].Freq
);
1307 /* and insert the new node in the heap */
1308 G2
.heap
[SMALLEST
] = node
++;
1309 bb_archival_gzip_pqdownheap(tree
, SMALLEST
);
1310 } while (G2
.heap_len
>= 2);
1312 G2
.heap
[--G2
.heap_max
] = G2
.heap
[SMALLEST
];
1314 /* At this point, the fields freq and dad are set. We can now
1315 * generate the bit lengths.
1317 bb_archival_gzip_gen_bitlen(desc
);
1319 /* The field len is now set, we can generate the bit codes */
1320 bb_archival_gzip_gen_codes(tree
, max_code
);
1323 /* ===========================================================================
1324 * Scan a literal or distance tree to determine the frequencies of the codes
1325 * in the bit length tree. Updates opt_len to take into account the repeat
1326 * counts. (The contribution of the bit length codes will be added later
1327 * during the construction of bl_tree.)
1329 BB_STATIC
void bb_archival_gzip_scan_tree(struct bb_archival_gzip_ct_data
*tree
, int max_code
)
1331 int n
; /* iterates over all tree elements */
1332 int prevlen
= -1; /* last emitted length */
1333 int curlen
; /* length of current code */
1334 int nextlen
= tree
[0].Len
; /* length of next code */
1335 int count
= 0; /* repeat count of the current code */
1336 int max_count
= 7; /* max repeat count */
1337 int min_count
= 4; /* min repeat count */
1343 tree
[max_code
+ 1].Len
= 0xffff; /* guard */
1345 for (n
= 0; n
<= max_code
; n
++) {
1347 nextlen
= tree
[n
+ 1].Len
;
1348 if (++count
< max_count
&& curlen
== nextlen
)
1351 if (count
< min_count
) {
1352 G2
.bl_tree
[curlen
].Freq
+= count
;
1353 } else if (curlen
!= 0) {
1354 if (curlen
!= prevlen
)
1355 G2
.bl_tree
[curlen
].Freq
++;
1356 G2
.bl_tree
[REP_3_6
].Freq
++;
1357 } else if (count
<= 10) {
1358 G2
.bl_tree
[REPZ_3_10
].Freq
++;
1360 G2
.bl_tree
[REPZ_11_138
].Freq
++;
1370 } else if (curlen
== nextlen
) {
1377 /* ===========================================================================
1378 * Send a literal or distance tree in compressed form, using the codes in
1381 BB_STATIC
void bb_archival_gzip_send_tree(const struct bb_archival_gzip_ct_data
*tree
, int max_code
)
1383 int n
; /* iterates over all tree elements */
1384 int prevlen
= -1; /* last emitted length */
1385 int curlen
; /* length of current code */
1386 int nextlen
= tree
[0].Len
; /* length of next code */
1387 int count
= 0; /* repeat count of the current code */
1388 int max_count
= 7; /* max repeat count */
1389 int min_count
= 4; /* min repeat count */
1391 /* tree[max_code+1].Len = -1; *//* guard already set */
1393 max_count
= 138, min_count
= 3;
1395 for (n
= 0; n
<= max_code
; n
++) {
1397 nextlen
= tree
[n
+ 1].Len
;
1398 if (++count
< max_count
&& curlen
== nextlen
) {
1400 } else if (count
< min_count
) {
1402 SEND_CODE(curlen
, G2
.bl_tree
);
1404 } else if (curlen
!= 0) {
1405 if (curlen
!= prevlen
) {
1406 SEND_CODE(curlen
, G2
.bl_tree
);
1409 Assert(count
>= 3 && count
<= 6, " 3_6?");
1410 SEND_CODE(REP_3_6
, G2
.bl_tree
);
1411 bb_archival_gzip_send_bits(count
- 3, 2);
1412 } else if (count
<= 10) {
1413 SEND_CODE(REPZ_3_10
, G2
.bl_tree
);
1414 bb_archival_gzip_send_bits(count
- 3, 3);
1416 SEND_CODE(REPZ_11_138
, G2
.bl_tree
);
1417 bb_archival_gzip_send_bits(count
- 11, 7);
1424 } else if (curlen
== nextlen
) {
1434 /* ===========================================================================
1435 * Construct the Huffman tree for the bit lengths and return the index in
1436 * bl_order of the last bit length code to send.
1438 BB_STATIC
int bb_archival_gzip_build_bl_tree(void)
1440 int max_blindex
; /* index of last bit length code of non zero freq */
1442 /* Determine the bit length frequencies for literal and distance trees */
1443 bb_archival_gzip_scan_tree(G2
.dyn_ltree
, G2
.l_desc
.max_code
);
1444 bb_archival_gzip_scan_tree(G2
.dyn_dtree
, G2
.d_desc
.max_code
);
1446 /* Build the bit length tree: */
1447 bb_archival_gzip_build_tree(&G2
.bl_desc
);
1448 /* opt_len now includes the length of the tree representations, except
1449 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
1452 /* Determine the number of bit length codes to send. The pkzip format
1453 * requires that at least 4 bit length codes be sent. (appnote.txt says
1454 * 3 but the actual value used is 4.)
1456 for (max_blindex
= BL_CODES
- 1; max_blindex
>= 3; max_blindex
--) {
1457 if (G2
.bl_tree
[bb_archival_gzip_bl_order
[max_blindex
]].Len
!= 0)
1460 /* Update opt_len to include the bit length tree and counts */
1461 G2
.opt_len
+= 3 * (max_blindex
+ 1) + 5 + 5 + 4;
1462 Tracev((stderr
, "\ndyn trees: dyn %ld, stat %ld", (long)G2
.opt_len
, (long)G2
.static_len
));
1467 /* ===========================================================================
1468 * Send the header for a block using dynamic Huffman trees: the counts, the
1469 * lengths of the bit length codes, the literal tree and the distance tree.
1470 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
1472 BB_STATIC
void bb_archival_gzip_send_all_trees(int lcodes
, int dcodes
, int blcodes
)
1474 int rank
; /* index in bl_order */
1476 Assert(lcodes
>= 257 && dcodes
>= 1 && blcodes
>= 4, "not enough codes");
1477 Assert(lcodes
<= L_CODES
&& dcodes
<= D_CODES
1478 && blcodes
<= BL_CODES
, "too many codes");
1479 Tracev((stderr
, "\nbl counts: "));
1480 bb_archival_gzip_send_bits(lcodes
- 257, 5); /* not +255 as stated in appnote.txt */
1481 bb_archival_gzip_send_bits(dcodes
- 1, 5);
1482 bb_archival_gzip_send_bits(blcodes
- 4, 4); /* not -3 as stated in appnote.txt */
1483 for (rank
= 0; rank
< blcodes
; rank
++) {
1484 Tracev((stderr
, "\nbl code %2d ", bb_archival_gzip_bl_order
[rank
]));
1485 bb_archival_gzip_send_bits(G2
.bl_tree
[bb_archival_gzip_bl_order
[rank
]].Len
, 3);
1487 Tracev((stderr
, "\nbl tree: sent %ld", (long)G1
.bits_sent
));
1489 bb_archival_gzip_send_tree((struct bb_archival_gzip_ct_data
*) G2
.dyn_ltree
, lcodes
- 1); /* send the literal tree */
1490 Tracev((stderr
, "\nlit tree: sent %ld", (long)G1
.bits_sent
));
1492 bb_archival_gzip_send_tree((struct bb_archival_gzip_ct_data
*) G2
.dyn_dtree
, dcodes
- 1); /* send the distance tree */
1493 Tracev((stderr
, "\ndist tree: sent %ld", (long)G1
.bits_sent
));
1496 /* ===========================================================================
1497 * Save the match info and tally the frequency counts. Return true if
1498 * the current block must be flushed.
1500 BB_STATIC
int bb_archival_gzip_ct_tally(int dist
, int lc
)
1502 G1
.l_buf
[G2
.last_lit
++] = lc
;
1504 /* lc is the unmatched char */
1505 G2
.dyn_ltree
[lc
].Freq
++;
1507 /* Here, lc is the match length - MIN_MATCH */
1508 dist
--; /* dist = match distance - 1 */
1509 Assert((ush
) dist
< (ush
) MAX_DIST
1510 && (ush
) lc
<= (ush
) (MAX_MATCH
- MIN_MATCH
)
1511 && (ush
) D_CODE(dist
) < (ush
) D_CODES
, "ct_tally: bad match"
1514 G2
.dyn_ltree
[G2
.length_code
[lc
] + LITERALS
+ 1].Freq
++;
1515 G2
.dyn_dtree
[D_CODE(dist
)].Freq
++;
1517 G1
.d_buf
[G2
.last_dist
++] = dist
;
1518 G2
.flags
|= G2
.flag_bit
;
1522 /* Output the flags if they fill a byte: */
1523 if ((G2
.last_lit
& 7) == 0) {
1524 G2
.flag_buf
[G2
.last_flags
++] = G2
.flags
;
1528 /* Try to guess if it is profitable to stop the current block here */
1529 if ((G2
.last_lit
& 0xfff) == 0) {
1530 /* Compute an upper bound for the compressed length */
1531 ulg out_length
= G2
.last_lit
* 8L;
1532 ulg in_length
= (ulg
) G1
.strstart
- G1
.block_start
;
1535 for (dcode
= 0; dcode
< D_CODES
; dcode
++) {
1536 out_length
+= G2
.dyn_dtree
[dcode
].Freq
* (5L + bb_archival_gzip_extra_dbits
[dcode
]);
1540 "\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
1541 G2
.last_lit
, G2
.last_dist
,
1542 (long)in_length
, (long)out_length
,
1543 100L - out_length
* 100L / in_length
));
1544 if (G2
.last_dist
< G2
.last_lit
/ 2 && out_length
< in_length
/ 2)
1547 return (G2
.last_lit
== LIT_BUFSIZE
- 1 || G2
.last_dist
== DIST_BUFSIZE
);
1548 /* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
1549 * on 16 bit machines and because stored blocks are restricted to
1554 /* ===========================================================================
1555 * Send the block data compressed using the given Huffman trees
1557 BB_STATIC
void bb_archival_gzip_compress_block(const struct bb_archival_gzip_ct_data
*ltree
,
1558 const struct bb_archival_gzip_ct_data
*dtree
)
1560 unsigned dist
; /* distance of matched string */
1561 int lc
; /* match length or unmatched char (if dist == 0) */
1562 unsigned lx
= 0; /* running index in l_buf */
1563 unsigned dx
= 0; /* running index in d_buf */
1564 unsigned fx
= 0; /* running index in flag_buf */
1565 uch flag
= 0; /* current flags */
1566 unsigned code
; /* the code to send */
1567 int extra
; /* number of extra bits to send */
1569 if (G2
.last_lit
!= 0) do {
1571 flag
= G2
.flag_buf
[fx
++];
1572 lc
= G1
.l_buf
[lx
++];
1573 if ((flag
& 1) == 0) {
1574 SEND_CODE(lc
, ltree
); /* send a literal byte */
1575 Tracecv(lc
> ' ', (stderr
, " '%c' ", lc
));
1577 /* Here, lc is the match length - MIN_MATCH */
1578 code
= G2
.length_code
[lc
];
1579 SEND_CODE(code
+ LITERALS
+ 1, ltree
); /* send the length code */
1580 extra
= bb_archival_gzip_extra_lbits
[code
];
1582 lc
-= G2
.base_length
[code
];
1583 bb_archival_gzip_send_bits(lc
, extra
); /* send the extra length bits */
1585 dist
= G1
.d_buf
[dx
++];
1586 /* Here, dist is the match distance - 1 */
1587 code
= D_CODE(dist
);
1588 Assert(code
< D_CODES
, "bad d_code");
1590 SEND_CODE(code
, dtree
); /* send the distance code */
1591 extra
= bb_archival_gzip_extra_dbits
[code
];
1593 dist
-= G2
.base_dist
[code
];
1594 bb_archival_gzip_send_bits(dist
, extra
); /* send the extra distance bits */
1596 } /* literal or match pair ? */
1598 } while (lx
< G2
.last_lit
);
1600 SEND_CODE(END_BLOCK
, ltree
);
1603 /* ===========================================================================
1604 * Determine the best encoding for the current block: dynamic trees, static
1605 * trees or store, and output the encoded block to the zip file. This function
1606 * returns the total compressed length for the file so far.
1608 BB_STATIC
void bb_archival_gzip_flush_block(const char *buf
, ulg stored_len
, int eof
)
1610 ulg opt_lenb
, static_lenb
; /* opt_len and static_len in bytes */
1611 int max_blindex
; /* index of last bit length code of non zero freq */
1613 G2
.flag_buf
[G2
.last_flags
] = G2
.flags
; /* Save the flags for the last 8 items */
1615 /* Construct the literal and distance trees */
1616 bb_archival_gzip_build_tree(&G2
.l_desc
);
1617 Tracev((stderr
, "\nlit data: dyn %ld, stat %ld", (long)G2
.opt_len
, (long)G2
.static_len
));
1619 bb_archival_gzip_build_tree(&G2
.d_desc
);
1620 Tracev((stderr
, "\ndist data: dyn %ld, stat %ld", (long)G2
.opt_len
, (long)G2
.static_len
));
1621 /* At this point, opt_len and static_len are the total bit lengths of
1622 * the compressed block data, excluding the tree representations.
1625 /* Build the bit length tree for the above two trees, and get the index
1626 * in bl_order of the last bit length code to send.
1628 max_blindex
= bb_archival_gzip_build_bl_tree();
1630 /* Determine the best encoding. Compute first the block length in bytes */
1631 opt_lenb
= (G2
.opt_len
+ 3 + 7) >> 3;
1632 static_lenb
= (G2
.static_len
+ 3 + 7) >> 3;
1635 "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
1636 (unsigned long)opt_lenb
, (unsigned long)G2
.opt_len
,
1637 (unsigned long)static_lenb
, (unsigned long)G2
.static_len
,
1638 (unsigned long)stored_len
,
1639 G2
.last_lit
, G2
.last_dist
));
1641 if (static_lenb
<= opt_lenb
)
1642 opt_lenb
= static_lenb
;
1644 /* If compression failed and this is the first and last block,
1645 * and if the zip file can be seeked (to rewrite the local header),
1646 * the whole file is transformed into a stored file:
1648 // seekable() is constant FALSE in busybox, and G2.compressed_len is disabled
1649 // (this was the only user)
1650 // if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
1651 // /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
1653 // bb_error_msg("block vanished");
1655 // G2.compressed_len = stored_len << 3;
1656 // copy_block(buf, (unsigned) stored_len, 0); /* without header */
1658 if (stored_len
+ 4 <= opt_lenb
&& buf
!= NULL
) {
1659 /* 4: two words for the lengths */
1660 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
1661 * Otherwise we can't have processed more than WSIZE input bytes since
1662 * the last block flush, because compression would have been
1663 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
1664 * transform a block into a stored block.
1666 bb_archival_gzip_send_bits((STORED_BLOCK
<< 1) + eof
, 3); /* send block type */
1667 // G2.compressed_len = ((G2.compressed_len + 3 + 7) & ~7L)
1668 // + ((stored_len + 4) << 3);
1669 bb_archival_gzip_copy_block(buf
, (unsigned) stored_len
, 1); /* with header */
1671 if (static_lenb
== opt_lenb
) {
1672 bb_archival_gzip_send_bits((STATIC_TREES
<< 1) + eof
, 3);
1673 bb_archival_gzip_compress_block((struct bb_archival_gzip_ct_data
*) G2
.static_ltree
, (struct bb_archival_gzip_ct_data
*) G2
.static_dtree
);
1674 // G2.compressed_len += 3 + G2.static_len;
1676 bb_archival_gzip_send_bits((DYN_TREES
<< 1) + eof
, 3);
1677 bb_archival_gzip_send_all_trees(G2
.l_desc
.max_code
+ 1, G2
.d_desc
.max_code
+ 1,
1679 bb_archival_gzip_compress_block((struct bb_archival_gzip_ct_data
*) G2
.dyn_ltree
, (struct bb_archival_gzip_ct_data
*) G2
.dyn_dtree
);
1680 // G2.compressed_len += 3 + G2.opt_len;
1682 // Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
1683 bb_archival_gzip_init_block();
1686 bb_archival_gzip_bi_windup();
1687 // G2.compressed_len += 7; /* align on byte boundary */
1689 // Tracev((stderr, "\ncomprlen %lu(%lu) ",
1690 // (unsigned long)G2.compressed_len >> 3,
1691 // (unsigned long)G2.compressed_len - 7 * eof));
1693 return; /* was "return G2.compressed_len >> 3;" */
/* ===========================================================================
 * Update a hash value with the given input byte
 * IN assertion: all calls to UPDATE_HASH are made with consecutive
 * input characters, so that a running hash key can be computed from the
 * previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)

/* ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 *
 * Processes a new input file and return its compressed length. Sets
 * the compressed length, crc, deflate flags and internal file
 * attributes.
 */

/* Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match. */
#define FLUSH_BLOCK(eof) \
	bb_archival_gzip_flush_block( \
		G1.block_start >= 0L \
			? (char*)&G1.window[(unsigned)G1.block_start] \
			: (char*)NULL, \
		(ulg)G1.strstart - G1.block_start, \
		(eof) \
	)

/* Insert string s in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of s are valid
 * (except for the last MIN_MATCH-1 bytes of the input file). */
#define INSERT_STRING(s, match_head) \
do { \
	UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
	G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
	head[G1.ins_h] = (s); \
} while (0)
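
/* Worked example (added for clarity): with HASH_BITS = 13, H_SHIFT is
 * (13+3-1)/3 = 5, so after three UPDATE_HASH() steps the first byte has been
 * shifted out of the 13-bit key entirely. INSERT_STRING(s, match_head)
 * therefore hashes window[s..s+2], links position s into the chain for that
 * key, and leaves the previous chain head in match_head for longest_match().
 */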
BB_STATIC void bb_archival_gzip_deflate(void)
{
	IPos hash_head;		/* head of hash chain */
	IPos prev_match;	/* previous match */
	int flush;		/* set if current block must be flushed */
	int match_available = 0;	/* set if previous match exists */
	unsigned match_length = MIN_MATCH - 1;	/* length of best match */

	/* Process the input block. */
	while (G1.lookahead != 0) {
		/* Insert the string window[strstart .. strstart+2] in the
		 * dictionary, and set hash_head to the head of the hash chain:
		 */
		INSERT_STRING(G1.strstart, hash_head);

		/* Find the longest match, discarding those <= prev_length.
		 */
		G1.prev_length = match_length;
		prev_match = G1.match_start;
		match_length = MIN_MATCH - 1;

		if (hash_head != 0 && G1.prev_length < max_lazy_match
		 && G1.strstart - hash_head <= MAX_DIST
		) {
			/* To simplify the code, we prevent matches with the string
			 * of window index 0 (in particular we have to avoid a match
			 * of the string with itself at the start of the input file).
			 */
			match_length = bb_archival_gzip_longest_match(hash_head);
			/* longest_match() sets match_start */
			if (match_length > G1.lookahead)
				match_length = G1.lookahead;

			/* Ignore a length 3 match if it is too distant: */
			if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) {
				/* If prev_match is also MIN_MATCH, G1.match_start is garbage
				 * but we will ignore the current match anyway.
				 */
				match_length--;
			}
		}
		/* If there was a match at the previous step and the current
		 * match is not better, output the previous match:
		 */
		if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) {
			bb_archival_gzip_check_match(G1.strstart - 1, prev_match, G1.prev_length);
			flush = bb_archival_gzip_ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH);

			/* Insert in hash table all strings up to the end of the match.
			 * strstart-1 and strstart are already inserted.
			 */
			G1.lookahead -= G1.prev_length - 1;
			G1.prev_length -= 2;
			do {
				G1.strstart++;
				INSERT_STRING(G1.strstart, hash_head);
				/* strstart never exceeds WSIZE-MAX_MATCH, so there are
				 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
				 * these bytes are garbage, but it does not matter since the
				 * next lookahead bytes will always be emitted as literals.
				 */
			} while (--G1.prev_length != 0);
			match_available = 0;
			match_length = MIN_MATCH - 1;
			G1.strstart++;
			if (flush) {
				FLUSH_BLOCK(0);
				G1.block_start = G1.strstart;
			}
		} else if (match_available) {
			/* If there was no match at the previous position, output a
			 * single literal. If there was a match but the current match
			 * is longer, truncate the previous match to a single literal.
			 */
			Tracevv((stderr, "%c", G1.window[G1.strstart - 1]));
			if (bb_archival_gzip_ct_tally(0, G1.window[G1.strstart - 1])) {
				FLUSH_BLOCK(0);
				G1.block_start = G1.strstart;
			}
			G1.strstart++;
			G1.lookahead--;
		} else {
			/* There is no previous match to compare with, wait for
			 * the next step to decide.
			 */
			match_available = 1;
			G1.strstart++;
			G1.lookahead--;
		}
		Assert(G1.strstart <= G1.isize && G1.lookahead <= G1.isize, "a bit too far");

		/* Make sure that we always have enough lookahead, except
		 * at the end of the input file. We need MAX_MATCH bytes
		 * for the next match, plus MIN_MATCH bytes to insert the
		 * string following the next match.
		 */
		bb_archival_gzip_fill_window_if_needed();
	}
	if (match_available)
		bb_archival_gzip_ct_tally(0, G1.window[G1.strstart - 1]);

	FLUSH_BLOCK(1);	/* eof */
}
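
/* Note: the lazy-evaluation strategy above, in short: after a match is found
 * at position p, it is not emitted immediately; the loop first hashes p+1 and
 * looks for a better match there (only while the pending match is shorter
 * than max_lazy_match). If the new match is not longer, the match at p is
 * emitted via ct_tally(distance, length); otherwise p becomes a single
 * literal and the search continues from the longer match.
 */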
/* ===========================================================================
 * Initialize the bit string routines.
 */

/* ===========================================================================
 * Initialize the "longest match" routines for a new file
 */
BB_STATIC void bb_archival_gzip_lm_init(void)
{
	unsigned j;

	/* Initialize the hash table. */
	memset(head, 0, HASH_SIZE * sizeof(*head));
	/* prev will be initialized on the fly */

	/* ??? reduce max_chain_length for binary files */

	//G1.strstart = 0; // globals are zeroed in pack_gzip()
	//G1.block_start = 0L; // globals are zeroed in pack_gzip()

	G1.lookahead = bb_archival_gzip_file_read(G1.window,
			sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE);

	if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) {
		G1.eofile = 1;
		G1.lookahead = 0;
		return;
	}
	//G1.eofile = 0; // globals are zeroed in pack_gzip()

	/* Make sure that we always have enough lookahead. This is important
	 * if input comes from a device such as a tty.
	 */
	bb_archival_gzip_fill_window_if_needed();

	//G1.ins_h = 0; // globals are zeroed in pack_gzip()
	for (j = 0; j < MIN_MATCH - 1; j++)
		UPDATE_HASH(G1.ins_h, G1.window[j]);
	/* If lookahead < MIN_MATCH, ins_h is garbage, but this is
	 * not important since only literal bytes will be emitted.
	 */
}
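
/* Notes on lm_init():
 * - The first read asks for 2*WSIZE bytes except where int is 16 bits,
 *   presumably because 2*WSIZE (0x10000) would not fit in a 16-bit unsigned;
 *   this mirrors upstream gzip.
 * - The loop above primes ins_h with the first MIN_MATCH-1 bytes; the first
 *   INSERT_STRING in deflate() feeds in the remaining byte, completing the
 *   rolling hash.
 */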
/* ===========================================================================
 * Allocate the match buffer, initialize the various tables and save the
 * location of the internal file attribute (ascii/binary) and method
 * (DEFLATE/STORE).
 * One callsite in zip()
 */
BB_STATIC void bb_archival_gzip_ct_init(void)
{
	int n;		/* iterates over tree elements */
	int length;	/* length value */
	int code;	/* code value */
	int dist;	/* distance index */

	// //G2.compressed_len = 0L; // globals are zeroed in pack_gzip()

	if (G2.static_dtree[0].Len != 0)
		return;	/* ct_init already called */

	/* Initialize the mapping length (0..255) -> length code (0..28) */
	length = 0;
	for (code = 0; code < LENGTH_CODES - 1; code++) {
		G2.base_length[code] = length;
		for (n = 0; n < (1 << bb_archival_gzip_extra_lbits[code]); n++) {
			G2.length_code[length++] = code;
		}
	}
	Assert(length == 256, "ct_init: length != 256");
	/* Note that the length 255 (match length 258) can be represented
	 * in two different ways: code 284 + 5 bits or code 285, so we
	 * overwrite length_code[255] to use the best encoding:
	 */
	G2.length_code[length - 1] = code;

	/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
	dist = 0;
	for (code = 0; code < 16; code++) {
		G2.base_dist[code] = dist;
		for (n = 0; n < (1 << bb_archival_gzip_extra_dbits[code]); n++) {
			G2.dist_code[dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: dist != 256");
	dist >>= 7;	/* from now on, all distances are divided by 128 */
	for (; code < D_CODES; code++) {
		G2.base_dist[code] = dist << 7;
		for (n = 0; n < (1 << (bb_archival_gzip_extra_dbits[code] - 7)); n++) {
			G2.dist_code[256 + dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: 256+dist != 512");

	/* Construct the codes of the static literal tree */
	//for (n = 0; n <= MAX_BITS; n++) // globals are zeroed in pack_gzip()
	//	G2.bl_count[n] = 0;

	n = 0;
	while (n <= 143) {
		G2.static_ltree[n++].Len = 8;
	}
	//G2.bl_count[8] = 143 + 1;
	while (n <= 255) {
		G2.static_ltree[n++].Len = 9;
	}
	//G2.bl_count[9] = 255 - 143;
	while (n <= 279) {
		G2.static_ltree[n++].Len = 7;
	}
	//G2.bl_count[7] = 279 - 255;
	while (n <= 287) {
		G2.static_ltree[n++].Len = 8;
	}
	//G2.bl_count[8] += 287 - 279;
	G2.bl_count[7] = 279 - 255;
	G2.bl_count[8] = (143 + 1) + (287 - 279);
	G2.bl_count[9] = 255 - 143;
	/* Codes 286 and 287 do not exist, but we must include them in the
	 * tree construction to get a canonical Huffman tree (longest code
	 * all ones)
	 */
	bb_archival_gzip_gen_codes((struct bb_archival_gzip_ct_data *) G2.static_ltree, L_CODES + 1);

	/* The static distance tree is trivial: */
	for (n = 0; n < D_CODES; n++) {
		G2.static_dtree[n].Len = 5;
		G2.static_dtree[n].Code = bb_archival_gzip_bi_reverse(n, 5);
	}

	/* Initialize the first block of the first file: */
	bb_archival_gzip_init_block();
}
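
/* Note: the fixed code lengths assigned above (8 bits for literals 0..143,
 * 9 bits for 144..255, 7 bits for codes 256..279 and 8 bits for 280..287)
 * are the static Huffman table defined by RFC 1951 section 3.2.6; the
 * bl_count[] totals written just before gen_codes() are simply the number of
 * codes of each length. The static distance tree is a flat 5 bits per code,
 * so only the bit-reversed code values need to be computed.
 */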
/* ===========================================================================
 * Deflate in to out.
 * IN assertions: the input and output buffers are cleared.
 */
BB_STATIC void bb_archival_gzip_zip(void)
{
	unsigned deflate_flags;

	//G1.outcnt = 0; // globals are zeroed in pack_gzip()

	/* Write the header to the gzip file. See algorithm.doc for the format */
	/* magic header for gzip files: 1F 8B */
	/* compression method: 8 (DEFLATED) */
	/* general flags: 0 */
	bb_archival_gzip_put_32bit(0x00088b1f);
	bb_archival_gzip_put_32bit(0); /* Unix timestamp */
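	/* Note: put_32bit() stores the least significant byte first (gzip header
	 * fields are little-endian per RFC 1952), so 0x00088b1f comes out as the
	 * bytes 1f 8b 08 00: magic 1f 8b, CM=8 (deflate), FLG=0. The second call
	 * writes MTIME=0, i.e. "no timestamp available".
	 */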

	/* Write deflated file to zip file */
	G1.crc = ~0;

	bb_archival_gzip_ct_init();
	bb_archival_gzip_lm_init();

	deflate_flags = 0x300; /* extra flags. OS id = 3 (Unix) */
	/* Note that comp_level < 4 does not exist in this version of gzip */
	if (comp_level_minus4 == 9 - 4) {
		deflate_flags |= 0x02; /* SLOW flag */
	}
	bb_archival_gzip_put_16bit(deflate_flags);
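	/* Note: put_16bit() likewise emits the low byte first, so 0x300 becomes
	 * XFL=0x00 followed by OS=0x03 (Unix); at level 9 XFL becomes 0x02,
	 * RFC 1952's "maximum compression, slowest algorithm" flag.
	 */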

	/* The above 32-bit misaligns outbuf (10 bytes are stored), flush it */
	bb_archival_gzip_flush_outbuf_if_32bit_optimized();

	bb_archival_gzip_deflate();

	/* Write the crc and uncompressed size */
	bb_archival_gzip_put_32bit(~G1.crc);
	bb_archival_gzip_put_32bit(G1.isize);

	bb_archival_gzip_flush_outbuf();
}
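
/* Note: the two trailing 32-bit fields written above form the gzip trailer
 * defined by RFC 1952: CRC32 of the uncompressed data (G1.crc is kept
 * pre-inverted, hence the ~) and ISIZE, the input length modulo 2^32.
 */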
/* ======================================================================== */
BB_STATIC long bb_archival_gzip_pack_gzip(bb_archive_transformer_state_t *xstate)
{
	/* Reinit G1.xxx except pointers to allocated buffers, and entire G2 */
	memset(&G1.crc, 0, (sizeof(G1) - offsetof(struct bb_archival_gzip_globals, crc)) + sizeof(G2));
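	/* Note: the memset above zeroes everything in G1 starting at member 'crc'
	 * plus the whole of G2 (allocated contiguously after G1 in
	 * bb_archival_gzip_main()); the earlier G1 members, i.e. the pointers to
	 * the buffers allocated with ALLOC(), are deliberately left intact so
	 * they can be reused when several FILE arguments are compressed.
	 */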

	/* Clear input and output buffers */

	G2.l_desc.dyn_tree     = G2.dyn_ltree;
	G2.l_desc.static_tree  = G2.static_ltree;
	G2.l_desc.extra_bits   = bb_archival_gzip_extra_lbits;
	G2.l_desc.extra_base   = LITERALS + 1;
	G2.l_desc.elems        = L_CODES;
	G2.l_desc.max_length   = MAX_BITS;
	//G2.l_desc.max_code   = 0;
	G2.d_desc.dyn_tree     = G2.dyn_dtree;
	G2.d_desc.static_tree  = G2.static_dtree;
	G2.d_desc.extra_bits   = bb_archival_gzip_extra_dbits;
	//G2.d_desc.extra_base = 0;
	G2.d_desc.elems        = D_CODES;
	G2.d_desc.max_length   = MAX_BITS;
	//G2.d_desc.max_code   = 0;
	G2.bl_desc.dyn_tree    = G2.bl_tree;
	//G2.bl_desc.static_tree = NULL;
	G2.bl_desc.extra_bits  = bb_archival_gzip_extra_blbits;
	//G2.bl_desc.extra_base = 0;
	G2.bl_desc.elems       = BL_CODES;
	G2.bl_desc.max_length  = MAX_BL_BITS;
	//G2.bl_desc.max_code  = 0;

	/* Saving of timestamp is disabled. Why?
	 * - it is not Y2038-safe.
	 * - some people want deterministic results
	 *   (normally they'd use -n, but our -n is a nop).
	 *
	 * Per RFC 1952, gzfile.time=0 is "no timestamp".
	 * If users will demand this to be reinstated,
	 * implement -n "don't save timestamp".
	 */
#if 0 /* timestamp code, disabled per the comment above */
	struct stat s;
	fstat(STDIN_FILENO, &s);
#endif

	bb_archival_gzip_zip();

	return 0;
}
BB_STATIC const char bb_archival_gzip_longopts[] =
	"stdout\0"     bb_No_argument "c"
	"to-stdout\0"  bb_No_argument "c"
	"force\0"      bb_No_argument "f"
	"verbose\0"    bb_No_argument "v"
	"decompress\0" bb_No_argument "d"
	"uncompress\0" bb_No_argument "d"
	"test\0"       bb_No_argument "t"
	"quiet\0"      bb_No_argument "q"
	"fast\0"       bb_No_argument "1"
	"best\0"       bb_No_argument "9"
	"no-name\0"    bb_No_argument "n"
	;

/*
 * Linux kernel build uses gzip -d -n. We accept and ignore -n.
 * -n (--no-name):
 * gzip: do not save the original file name and time stamp.
 * (The original name is always saved if the name had to be truncated.)
 * gunzip: do not restore the original file name/time even if present
 * (remove only the gzip suffix from the compressed file name).
 * This option is the default when decompressing.
 * --name:
 * gzip: always save the original file name and time stamp (this is the default)
 * gunzip: restore the original file name and time stamp if present.
 */
BB_STATIC int bb_archival_gzip_main(int argc, char **argv)
{
	unsigned opt;
	static const struct {
		uint8_t good;
		uint8_t chain_shift;
		uint8_t lazy2;
		uint8_t nice2;
	} gzip_level_config[6] = {
		{4,   4,   4/2,  16/2}, /* Level 4 */
		{8,   5,  16/2,  32/2}, /* Level 5 */
		{8,   7,  16/2, 128/2}, /* Level 6 */
		{8,   8,  32/2, 128/2}, /* Level 7 */
		{32, 10, 128/2, 258/2}, /* Level 8 */
		{32, 12, 258/2, 258/2}, /* Level 9 */
	};
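	/* Note on the table above: columns are, in order, good (copied to
	 * good_match), chain_shift (max_chain_length = 1 << chain_shift),
	 * lazy2 and nice2 (halves of max_lazy_match and nice_match). Storing
	 * the halved values keeps 258 within a uint8_t; they are doubled again
	 * where they are used below.
	 */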

	BB_SET_PTR_TO_GLOBALS((char *)bb_xzalloc(sizeof(struct bb_archival_gzip_globals)
			+ sizeof(struct bb_archival_gzip_globals2))
			+ sizeof(struct bb_archival_gzip_globals));

	/* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */
	opt = bb_getopt32long(argv, BB_ARCHIVE_UNPK_OPTSTR "dt" "n123456789", bb_archival_gzip_longopts);
	if (opt & (BB_ARCHIVE_UNPK_OPT_DECOMPRESS | BB_ARCHIVE_UNPK_OPT_TEST)) /* -d and/or -t */
		return bb_archival_gunzip_main(argc, argv);

	opt >>= (BB_ARCHIVE_UNPK_OPTSTRLEN + 2 + 1); /* drop cfkvq[dt]n bits */
	if (opt == 0)
		opt = 1 << 5; /* default: 6 */
	opt = ffs(opt >> 4); /* Maps -1..-4 to [0], -5 to [1] ... -9 to [5] */
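	/* Worked example of the mapping above: after the shift, bit 0 corresponds
	 * to -1 ... bit 8 to -9. For -6, bit 5 (0x20) is set; opt >> 4 == 0x02
	 * and ffs(0x02) == 2, selecting gzip_level_config[2], the Level 6 row.
	 * For -1..-4 the set bit (if any) is shifted away, ffs(0) == 0, and the
	 * Level 4 row [0] is used.
	 */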
	comp_level_minus4 = opt;

	max_chain_length = 1 << gzip_level_config[opt].chain_shift;
	good_match       = gzip_level_config[opt].good;
	max_lazy_match   = gzip_level_config[opt].lazy2 * 2;
	nice_match       = gzip_level_config[opt].nice2 * 2;
	bb_option_mask32 &= BB_ARCHIVE_UNPK_OPTSTRMASK; /* retain only -cfkvq */

	/* Allocate all global buffers (for DYN_ALLOC option) */
	ALLOC(uch, G1.l_buf, INBUFSIZ);
	ALLOC(uch, G1.outbuf, OUTBUFSIZ);
	ALLOC(ush, G1.d_buf, DIST_BUFSIZE);
	ALLOC(uch, G1.window, 2L * WSIZE);
	ALLOC(ush, G1.prev, 1L << BITS);
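	/* Note: these buffers are allocated once per process and are not freed
	 * between files; pack_gzip() deliberately skips them when it re-zeroes
	 * the globals. G1.window is 2*WSIZE bytes (sliding window plus
	 * lookahead) and G1.prev has 1 << BITS entries for the hash machinery.
	 */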

	/* Initialize the CRC32 table */
	bb_global_crc32_new_table_le();

	return bb_archival_bbunpack(argv, bb_archival_gzip_pack_gzip, bb_archival_append_ext, "gz");
}
BB_STATIC struct bb_applet_desc bb_archival_gzip_applet_desc = {
	bb_archival_gzip_main,
	"[-cfk" "dt" "123456789" "] [FILE]..."
	"Compress FILEs (or stdin)\n"
	"\n	-1..9	Compression level"
	"\n	-c	Write to stdout"
	"\n	-k	Keep input files"
	"\n	-t	Test integrity"
	"$ ls -la /tmp/busybox*\n"
	"-rw-rw-r-- 1 andersen andersen 1761280 Apr 14 17:47 /tmp/busybox.tar\n"
	"$ gzip /tmp/busybox.tar\n"
	"$ ls -la /tmp/busybox*\n"
	"-rw-rw-r-- 1 andersen andersen 554058 Apr 14 17:49 /tmp/busybox.tar.gz\n"
};
/*================================================================================================*/

#undef MIN_LOOKAHEAD
#undef translate_eol
#undef MAX_EXT_CHARS
#undef comp_level_minus4
#undef max_chain_length
#undef max_lazy_match
#undef DEBUG_bits_sent
#undef bb_archival_gzip_put_8bit
#undef OPTIMIZED_PUT_32BIT
#undef bb_archival_gzip_check_match
#undef INSERT_STRING