/*
  Copyright (c) 1990-2002 Info-ZIP.  All rights reserved.

  See the accompanying file LICENSE, version 2000-Apr-09 or later
  (the contents of which are also included in unzip.h) for terms of use.
  If, for some reason, all these files are missing, the Info-ZIP license
  also may be found at:  ftp://ftp.info-zip.org/pub/infozip/license.html

  *** This is only needed to build funzip correctly and is a direct copy
  of inflate.c.  Don't forget to update here if you update inflate.c!!
  See inflate.c for more information ***
*/
#define PKZIP_BUG_WORKAROUND    /* PKZIP 1.93a problem--live with it */

#define __INFLATE_C     /* identifies this source module */

#define INFMOD          /* tell inflate.h to include code to be compiled */

/* marker for "unused" huft code, and corresponding check macro */
#define INVALID_CODE 99
#define IS_INVALID_CODE(c)  ((c) == INVALID_CODE)
#ifndef WSIZE                   /* default is 32K resp. 64K */
#  ifdef USE_DEFLATE64
#    define WSIZE   65536L      /* window size--must be a power of two, and */
#  else                         /*  at least 64K for PKZip's deflate64 method */
#    define WSIZE   0x8000      /* window size--must be a power of two, and */
#  endif                        /*  at least 32K for zip's deflate method */
#endif
/* some buffer counters must be capable of holding 64k for Deflate64 */
#if (defined(USE_DEFLATE64) && defined(INT_16BIT))
#  define UINT_D64 ulg
#else
#  define UINT_D64 unsigned
#endif
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
#  define wsize G._wsize        /* wsize is a variable */
#else
#  define wsize WSIZE           /* wsize is a constant */
#endif
#ifndef NEXTBYTE        /* default is to simply get a byte from stdin */
#  define NEXTBYTE getchar()
#endif

#ifndef MESSAGE   /* only used twice, for fixed strings--NOT general-purpose */
#  define MESSAGE(str,len,flag)  fprintf(stderr,(char *)(str))
#endif
#ifndef FLUSH           /* default is to simply write the buffer to stdout */
#  define FLUSH(n) \
    (((extent)fwrite(redirSlide, 1, (extent)(n), stdout) == (extent)(n)) ? \
     0 : PKDISK)
#endif
/* Warning: the fwrite above might not work on 16-bit compilers, since
   0x8000 might be interpreted as -32,768 by the library function.  When
   support for Deflate64 is enabled, the window size is 64K and the
   simple fwrite statement is definitely broken for 16-bit compilers. */
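
/* Illustrative sketch (not part of the original source): one way to keep the
 * default FLUSH definition workable on 16-bit compilers is to write the
 * window out in chunks that stay below 0x8000 bytes; the helper returns
 * nonzero on a short write.  "flush_chunked" is a hypothetical name, not an
 * UnZip/funzip API.
 *
 *     static int flush_chunked(const unsigned char *buf, unsigned long n)
 *     {
 *         while (n > 0) {
 *             size_t chunk = (n > 0x4000UL) ? (size_t)0x4000 : (size_t)n;
 *             if (fwrite(buf, 1, chunk, stdout) != chunk)
 *                 return 1;
 *             buf += chunk;
 *             n -= chunk;
 *         }
 *         return 0;
 *     }
 */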
#  define Trace(x) fprintf x

/*---------------------------------------------------------------------------*/

#ifdef USE_ZLIB

/*
   GRR:  return values for both original inflate() and UZinflate()
 */

/**************************/
/*  Function UZinflate()  */
/**************************/
int UZinflate(__G__ is_defl64)
/* decompress an inflated entry using the zlib routines */
    int retval = 0;             /* return code: 0 = "no error" */
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
    if (G.redirect_slide)
        wsize = G.redirect_size, redirSlide = G.redirect_buffer;
    else
        wsize = WSIZE, redirSlide = slide;
#endif
    G.dstrm.next_out = redirSlide;
    G.dstrm.avail_out = wsize;

    G.dstrm.next_in = G.inptr;
    G.dstrm.avail_in = G.incnt;
    /* only need to test this stuff once */
    if (zlib_version[0] != ZLIB_VERSION[0]) {
        Info(slide, 0x21, ((char *)slide,
          "error: incompatible zlib version (expected %s, found %s)\n",
          ZLIB_VERSION, zlib_version));
    } else if (strcmp(zlib_version, ZLIB_VERSION) != 0)
        Info(slide, 0x21, ((char *)slide,
          "warning: different zlib version (expected %s, using %s)\n",
          ZLIB_VERSION, zlib_version));
        /* windowBits = log2(wsize) */
        for (i = (unsigned)wsize, windowBits = 0;
             !(i & 1);  i >>= 1, ++windowBits);
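
        /* Worked example (not in the original source): for a power-of-two
         * wsize the loop above strips and counts the trailing zero bits,
         * i.e. windowBits = log2(wsize).  With wsize = 0x8000:
         *
         *     unsigned i, windowBits;
         *     for (i = 0x8000, windowBits = 0; !(i & 1); i >>= 1, ++windowBits)
         *         ;
         *
         * windowBits is now 15, since 2 to the 15th power is 0x8000.
         */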
        if ((unsigned)windowBits > (unsigned)15)
            windowBits = 15;
        else if (windowBits < 8)
            windowBits = 8;
        G.dstrm.zalloc = (alloc_func)Z_NULL;
        G.dstrm.zfree = (free_func)Z_NULL;

        Trace((stderr, "initializing inflate()\n"));
        err = inflateInit2(&G.dstrm, -windowBits);

        if (err == Z_MEM_ERROR)
            return 3;
        else if (err != Z_OK)
            Trace((stderr, "oops! (inflateInit2() err = %d)\n", err));
    while (err != Z_STREAM_END) {
        while (G.csize > 0) {
            Trace((stderr, "first loop: G.csize = %ld\n", G.csize));

            while (G.dstrm.avail_out > 0) {
                err = inflate(&G.dstrm, Z_PARTIAL_FLUSH);

                if (err == Z_DATA_ERROR) {
                    retval = 2; goto uzinflate_cleanup_exit;
                } else if (err == Z_MEM_ERROR) {
                    retval = 3; goto uzinflate_cleanup_exit;
                } else if (err != Z_OK && err != Z_STREAM_END)
                    Trace((stderr, "oops! (inflate(first loop) err = %d)\n", err));

            if (err == Z_STREAM_END)    /* "END-of-entry-condition" ? */
                break;

            if (G.csize <= 0L)          /* "END-of-entry-condition" ? */
                break;

            if (G.dstrm.avail_in <= 0) {
                if (fillinbuf(__G) == 0) {
                    /* no "END-condition" yet, but no more data */
                    retval = 2; goto uzinflate_cleanup_exit;

                G.dstrm.next_in = G.inptr;
                G.dstrm.avail_in = G.incnt;

            Trace((stderr, " avail_in = %d\n", G.dstrm.avail_in));

            if ((retval = FLUSH(wsize - G.dstrm.avail_out)) != 0)
                goto uzinflate_cleanup_exit;
            Trace((stderr, "inside loop: flushing %ld bytes (ptr diff = %ld)\n",
              (long)(wsize - G.dstrm.avail_out),
              (long)(G.dstrm.next_out-(Bytef *)redirSlide)));
            G.dstrm.next_out = redirSlide;
            G.dstrm.avail_out = wsize;
        /* no more input, so loop until we have all output */
        Trace((stderr, "beginning final loop: err = %d\n", err));
        while (err != Z_STREAM_END) {
            err = inflate(&G.dstrm, Z_PARTIAL_FLUSH);
            if (err == Z_DATA_ERROR) {
                retval = 2; goto uzinflate_cleanup_exit;
            } else if (err == Z_MEM_ERROR) {
                retval = 3; goto uzinflate_cleanup_exit;
            } else if (err == Z_BUF_ERROR) {    /* DEBUG */
                Trace((stderr,
                  "zlib inflate() did not detect stream end (%s, %s)\n",
                  G.zipfn, G.filename));
            } else if (err != Z_OK && err != Z_STREAM_END) {
                Trace((stderr, "oops! (inflate(final loop) err = %d)\n", err));

        /* final flush of slide[] */
        if ((retval = FLUSH(wsize - G.dstrm.avail_out)) != 0)
            goto uzinflate_cleanup_exit;
        Trace((stderr, "final loop: flushing %ld bytes (ptr diff = %ld)\n",
          (long)(wsize - G.dstrm.avail_out),
          (long)(G.dstrm.next_out-(Bytef *)redirSlide)));
        G.dstrm.next_out = redirSlide;
        G.dstrm.avail_out = wsize;
    Trace((stderr, "total in = %ld, total out = %ld\n", G.dstrm.total_in,
      G.dstrm.total_out));

    G.inptr = (uch *)G.dstrm.next_in;
    G.incnt = (G.inbuf + INBUFSIZ) - G.inptr;   /* reset for other routines */

uzinflate_cleanup_exit:
    err = inflateReset(&G.dstrm);
    if (err != Z_OK)
        Trace((stderr, "oops! (inflateReset() err = %d)\n", err));
/*---------------------------------------------------------------------------*/

#else /* !USE_ZLIB */

/* Function prototypes */
int inflate_codes OF((__GPRO__ struct huft *tl, struct huft *td,
                      int bl, int bd));
static int inflate_stored OF((__GPRO));
static int inflate_fixed OF((__GPRO));
static int inflate_dynamic OF((__GPRO));
static int inflate_block OF((__GPRO__ int *e));
/* The inflate algorithm uses a sliding 32K byte window on the uncompressed
   stream to find repeated byte strings.  This is implemented here as a
   circular buffer.  The index is updated simply by incrementing and then
   and'ing with 0x7fff (32K-1). */

/* It is left to other modules to supply the 32K area.  It is assumed
   to be usable as if it were declared "uch slide[32768];" or as just
   "uch *slide;" and then malloc'ed in the latter case.  The definition
   must be in unzip.h, included above. */
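
/* Illustrative sketch (not part of the original source): storing one output
 * byte into a 32K circular window and wrapping the index with a mask, as
 * described above.  "window" and "pos" are hypothetical names standing in
 * for the slide[] area and the global window pointer wp.
 *
 *     static uch window[0x8000];
 *     static unsigned pos = 0;
 *
 *     static void put_byte(uch c)
 *     {
 *         window[pos] = c;
 *         pos = (pos + 1) & 0x7fff;
 *     }
 */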
/* unsigned wp;  moved to globals.h */      /* current position in slide */

/* Tables for deflate from PKZIP's appnote.txt. */
/*  - Order of the bit length code lengths */
static ZCONST unsigned border[] = {
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
/*  - Copy lengths for literal codes 257..285 */
#ifdef USE_DEFLATE64
static ZCONST ush cplens64[] = {
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 3, 0, 0};
        /* For Deflate64, the code 285 is defined differently. */
#else
#  define cplens32 cplens
#endif

static ZCONST ush cplens32[] = {
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
        /* note: see note #13 above about the 258 in this list. */
/*  - Extra bits for literal codes 257..285 */
#ifdef USE_DEFLATE64
static ZCONST uch cplext64[] = {
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 16, INVALID_CODE, INVALID_CODE};
#else
#  define cplext32 cplext
#endif

static ZCONST uch cplext32[] = {
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, INVALID_CODE, INVALID_CODE};
/*  - Copy offsets for distance codes 0..29 (0..31 for Deflate64) */
static ZCONST ush cpdist[] = {
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
#if (defined(USE_DEFLATE64) || defined(PKZIP_BUG_WORKAROUND))
        8193, 12289, 16385, 24577, 32769, 49153};
#else
        8193, 12289, 16385, 24577};
#endif
/*  - Extra bits for distance codes 0..29 (0..31 for Deflate64) */
#ifdef USE_DEFLATE64
static ZCONST uch cpdext64[] = {
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
        12, 12, 13, 13, 14, 14};
#else
#  define cpdext32 cpdext
#endif

static ZCONST uch cpdext32[] = {
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
#ifdef PKZIP_BUG_WORKAROUND
        12, 12, 13, 13, INVALID_CODE, INVALID_CODE};
#else
        12, 12, 13, 13};
#endif
#ifdef PKZIP_BUG_WORKAROUND
#  define MAXLITLENS 288
#else
#  define MAXLITLENS 286
#endif

#if (defined(USE_DEFLATE64) || defined(PKZIP_BUG_WORKAROUND))
#  define MAXDISTS 32
#else
#  define MAXDISTS 30
#endif
/* moved to consts.h (included in unzip.c), resp. funzip.c */

/* And'ing with mask_bits[n] masks the lower n bits */
ZCONST ush near mask_bits[] = {
    0x0000,
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};
/* Macros for inflate() bit peeking and grabbing.
   The usage is:

        NEEDBITS(j)
        x = b & mask_bits[j];
        DUMPBITS(j)

   where NEEDBITS makes sure that b has at least j bits in it, and
   DUMPBITS removes the bits from b.  The macros use the variable k
   for the number of bits in b.  Normally, b and k are register
   variables for speed and are initialized at the beginning of a
   routine that uses these macros from a global bit buffer and count.

   In order to not ask for more bits than there are in the compressed
   stream, the Huffman tables are constructed to only ask for just
   enough bits to make up the end-of-block code (value 256).  Then no
   bytes need to be "returned" to the buffer at the end of the last
   block.  See the huft_build() routine.
 */
/* These have been moved to globals.h */
ulg bb;                         /* bit buffer */
unsigned bk;                    /* bits in bit buffer */
#ifndef CHECK_EOF
#  define CHECK_EOF             /* default as of 5.13/5.2 */
#endif

#ifndef CHECK_EOF
#  define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE)<<k;k+=8;}}
#else
#  define NEEDBITS(n) {while(k<(n)){int c=NEXTBYTE;\
    if(c==EOF){retval=1;goto cleanup_and_exit;}\
    b|=((ulg)c)<<k;k+=8;}}
#endif

#define DUMPBITS(n) {b>>=(n);k-=(n);}
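
/* Illustrative sketch (not part of the original source): reading a 5-bit
 * field with the macros above, in the style used by the decoders below.
 * The bit buffer b, bit count k, retval and the cleanup_and_exit label are
 * assumed to be set up exactly as inflate_codes() does.
 *
 *     register ulg b = G.bb;
 *     register unsigned k = G.bk;
 *     unsigned field;
 *
 *     NEEDBITS(5)
 *     field = (unsigned)b & mask_bits[5];
 *     DUMPBITS(5)
 */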
/*
   Huffman code decoding is performed using a multi-level table lookup.
   The fastest way to decode is to simply build a lookup table whose
   size is determined by the longest code.  However, the time it takes
   to build this table can also be a factor if the data being decoded
   are not very long.  The most common codes are necessarily the
   shortest codes, so those codes dominate the decoding time, and hence
   the speed.  The idea is you can have a shorter table that decodes the
   shorter, more probable codes, and then point to subsidiary tables for
   the longer codes.  The time it costs to decode the longer codes is
   then traded against the time it takes to make longer tables.

   The results of this trade are in the variables lbits and dbits
   below.  lbits is the number of bits the first level table for literal/
   length codes can decode in one step, and dbits is the same thing for
   the distance codes.  Subsequent tables are also less than or equal to
   those sizes.  These values may be adjusted either when all of the
   codes are shorter than that, in which case the longest code length in
   bits is used, or when the shortest code is *longer* than the requested
   table size, in which case the length of the shortest code in bits is
   used.

   There are two different values for the two tables, since they code a
   different number of possibilities each.  The literal/length table
   codes 286 possible values, or in a flat code, a little over eight
   bits.  The distance table codes 30 possible values, or a little less
   than five bits, flat.  The optimum values for speed end up being
   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
   The optimum values may differ though from machine to machine, and
   possibly even between compilers.  Your mileage may vary.
 */
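
/* Illustrative sketch (not part of the original source): the shape of the
 * multi-level lookup described above, as it is used in inflate_codes()
 * below.  tl is the first-level table and bl the number of bits it decodes;
 * an entry whose e field is greater than 32 (and is not the INVALID_CODE
 * marker) links to a sub-table indexed by the next (e & 31) bits.
 *
 *     NEEDBITS((unsigned)bl)
 *     t = tl + ((unsigned)b & mask_bits[bl]);
 *     while (1) {
 *         DUMPBITS(t->b)
 *         e = t->e;
 *         if (e <= 32)                 -- literal, length or end-of-block
 *             break;
 *         if (IS_INVALID_CODE(e))
 *             break;                   -- corrupt input
 *         e &= 31;
 *         NEEDBITS(e)
 *         t = t->v.t + ((unsigned)b & mask_bits[e]);
 *     }
 */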
static ZCONST int lbits = 9;    /* bits in base literal/length lookup table */
static ZCONST int dbits = 6;    /* bits in base distance lookup table */

#ifndef ASM_INFLATECODES
int inflate_codes(__G__ tl, td, bl, bd)
struct huft *tl, *td;   /* literal/length and distance decoder tables */
int bl, bd;             /* number of bits decoded by tl[] and td[] */
/* inflate (decompress) the codes in a deflated (compressed) block.
   Return an error code or zero if it all goes ok. */
  register unsigned e;  /* table entry flag/number of extra bits */
  unsigned d;           /* index for copy */
  UINT_D64 n;           /* length for copy (deflate64: might be 64k+2) */
  UINT_D64 w;           /* current window position (deflate64: up to 64k) */
  struct huft *t;       /* pointer to table entry */
  unsigned ml, md;      /* masks for bl and bd bits */
  register ulg b;       /* bit buffer */
  register unsigned k;  /* number of bits in bit buffer */
  int retval = 0;       /* error code returned: initialized to "no error" */

  /* make local copies of globals */
  b = G.bb;                     /* initialize bit buffer */
  w = G.wp;                     /* initialize window position */
  /* inflate the coded data */
  ml = mask_bits[bl];           /* precompute masks for speed */

  while (1)                     /* do until end of block */
    NEEDBITS((unsigned)bl)
    t = tl + ((unsigned)b & ml);

      if ((e = t->e) == 32)     /* then it's a literal */
        redirSlide[w++] = (uch)t->v.n;
          if ((retval = FLUSH(w)) != 0) goto cleanup_and_exit;

      if (e < 31)               /* then it's a length */
        /* get length of block to copy */
        n = t->v.n + ((unsigned)b & mask_bits[e]);

        /* decode distance of block to copy */
        NEEDBITS((unsigned)bd)
        t = td + ((unsigned)b & md);

        if (IS_INVALID_CODE(e))
          return 1;
        t = t->v.t + ((unsigned)b & mask_bits[e]);

        d = (unsigned)w - t->v.n - ((unsigned)b & mask_bits[e]);

#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
        if (G.redirect_slide) {
          /* &= w/ wsize unnecessary & wrong if redirect */
          if ((UINT_D64)d >= wsize)
            return 1;           /* invalid compressed data */
          e = (unsigned)(wsize - (d > (unsigned)w ? (UINT_D64)d : w));
        } else
#endif
          e = (unsigned)(wsize -
              ((d &= (unsigned)(wsize-1)) > (unsigned)w ?
               (UINT_D64)d : w));
        if ((UINT_D64)e > n) e = (unsigned)n;

#ifndef NOMEMCPY
        if ((unsigned)w - d >= e)
        /* (this test assumes unsigned comparison) */
          memcpy(redirSlide + (unsigned)w, redirSlide + d, e);
        else                    /* do it slowly to avoid memcpy() overlap */
#endif /* !NOMEMCPY */
          redirSlide[w++] = redirSlide[d++];

        if ((retval = FLUSH(w)) != 0) goto cleanup_and_exit;

      if (e == 31)              /* it's the EOB signal */
        /* sorry for this goto, but we have to exit two loops at once */

      if (IS_INVALID_CODE(e))
        return 1;

      t = t->v.t + ((unsigned)b & mask_bits[e]);

  /* restore the globals from the locals */
  G.wp = (unsigned)w;           /* restore global window pointer */
  G.bb = b;                     /* restore global bit buffer */

#endif /* ASM_INFLATECODES */

static int inflate_stored(__G)
/* "decompress" an inflated type 0 (stored) block. */
  UINT_D64 w;           /* current window position (deflate64: up to 64k!) */
  unsigned n;           /* number of bytes in block */
  register ulg b;       /* bit buffer */
  register unsigned k;  /* number of bits in bit buffer */
  int retval = 0;       /* error code returned: initialized to "no error" */

  /* make local copies of globals */
  Trace((stderr, "\nstored block"));
  b = G.bb;                     /* initialize bit buffer */
  w = G.wp;                     /* initialize window position */
  /* go to byte boundary */

  /* get the length and its complement */
  n = ((unsigned)b & 0xffff);
  if (n != (unsigned)((~b) & 0xffff))
    return 1;                   /* error in compressed data */

  /* read and output the compressed data */
    redirSlide[w++] = (uch)b;
      if ((retval = FLUSH(w)) != 0) goto cleanup_and_exit;

  /* restore the globals from the locals */
  G.wp = (unsigned)w;           /* restore global window pointer */
  G.bb = b;                     /* restore global bit buffer */

/* Globals for literal tables (built once) */
/* Moved to globals.h */
struct huft *fixed_tl = (struct huft *)NULL;
struct huft *fixed_td;
int fixed_bl, fixed_bd;

static int inflate_fixed(__G)
/* decompress an inflated type 1 (fixed Huffman codes) block.  We should
   either replace this with a custom decoder, or at least precompute the
   Huffman tables. */

  /* if first time, set up tables for fixed blocks */
  Trace((stderr, "\nliteral block"));
  if (G.fixed_tl == (struct huft *)NULL)
    int i;                      /* temporary variable */
    unsigned l[288];            /* length list for huft_build */

    for (i = 0; i < 144; i++)
    for (; i < 288; i++)        /* make a complete, but wrong code set */

#ifdef USE_DEFLATE64
    if ((i = huft_build(__G__ l, 288, 257, G.cplens, G.cplext,
                        &G.fixed_tl, &G.fixed_bl)) != 0)
#else
    if ((i = huft_build(__G__ l, 288, 257, cplens, cplext,
                        &G.fixed_tl, &G.fixed_bl)) != 0)
#endif
      G.fixed_tl = (struct huft *)NULL;

    for (i = 0; i < MAXDISTS; i++)      /* make an incomplete code set */

#ifdef USE_DEFLATE64
    if ((i = huft_build(__G__ l, MAXDISTS, 0, cpdist, G.cpdext,
                        &G.fixed_td, &G.fixed_bd)) > 1)
#else
    if ((i = huft_build(__G__ l, MAXDISTS, 0, cpdist, cpdext,
                        &G.fixed_td, &G.fixed_bd)) > 1)
#endif
      huft_free(G.fixed_tl);
      G.fixed_td = G.fixed_tl = (struct huft *)NULL;

  /* decompress until an end-of-block code */
  return inflate_codes(__G__ G.fixed_tl, G.fixed_td,
                       G.fixed_bl, G.fixed_bd);

static int inflate_dynamic(__G)
/* decompress an inflated type 2 (dynamic Huffman codes) block. */
  int i;                /* temporary variables */
  unsigned l;           /* last length */
  unsigned m;           /* mask for bit lengths table */
  unsigned n;           /* number of lengths to get */
  struct huft *tl;      /* literal/length code table */
  struct huft *td;      /* distance code table */
  int bl;               /* lookup bits for tl */
  int bd;               /* lookup bits for td */
  unsigned nb;          /* number of bit length codes */
  unsigned nl;          /* number of literal/length codes */
  unsigned nd;          /* number of distance codes */
  unsigned ll[MAXLITLENS+MAXDISTS];  /* lit./length and distance code lengths */
  register ulg b;       /* bit buffer */
  register unsigned k;  /* number of bits in bit buffer */
  int retval = 0;       /* error code returned: initialized to "no error" */

  /* make local bit buffer */
  Trace((stderr, "\ndynamic block"));
  /* read in table lengths */
  nl = 257 + ((unsigned)b & 0x1f);      /* number of literal/length codes */
  nd = 1 + ((unsigned)b & 0x1f);        /* number of distance codes */
  nb = 4 + ((unsigned)b & 0xf);         /* number of bit length codes */
  if (nl > MAXLITLENS || nd > MAXDISTS)
    return 1;                   /* bad lengths */

  /* read in bit-length-code lengths */
  for (j = 0; j < nb; j++)
    ll[border[j]] = (unsigned)b & 7;

  /* build decoding table for trees--single level, 7 bit lookup */
  retval = huft_build(__G__ ll, 19, 19, NULL, NULL, &tl, &bl);
  if (bl == 0)                  /* no bit lengths */

    return retval;              /* incomplete code set */
  /* read in literal and distance code lengths */
  while ((unsigned)i < n)
    NEEDBITS((unsigned)bl)
    j = (td = tl + ((unsigned)b & m))->b;

    if (j < 16)                 /* length of code in bits (0..15) */
      ll[i++] = l = j;          /* save last length in l */
    else if (j == 16)           /* repeat last length 3 to 6 times */
      j = 3 + ((unsigned)b & 3);
      if ((unsigned)i + j > n)
        return 1;

    else if (j == 17)           /* 3 to 10 zero length codes */
      j = 3 + ((unsigned)b & 7);
      if ((unsigned)i + j > n)
        return 1;

    else                        /* j == 18: 11 to 138 zero length codes */
      j = 11 + ((unsigned)b & 0x7f);
      if ((unsigned)i + j > n)
        return 1;
  /* free decoding table for trees */

  /* restore the global bit buffer */

  /* build the decoding tables for literal/length and distance codes */
#ifdef USE_DEFLATE64
  retval = huft_build(__G__ ll, nl, 257, G.cplens, G.cplext, &tl, &bl);
#else
  retval = huft_build(__G__ ll, nl, 257, cplens, cplext, &tl, &bl);
#endif
  if (bl == 0)                  /* no literals or lengths */

      MESSAGE((uch *)"(incomplete l-tree) ", 21L, 1);

    return retval;              /* incomplete code set */

#ifdef USE_DEFLATE64
  retval = huft_build(__G__ ll + nl, nd, 0, cpdist, G.cpdext, &td, &bd);
#else
  retval = huft_build(__G__ ll + nl, nd, 0, cpdist, cpdext, &td, &bd);
#endif
#ifdef PKZIP_BUG_WORKAROUND
  if (bd == 0 && nl > 257)      /* lengths but no distances */

      MESSAGE((uch *)"(incomplete d-tree) ", 21L, 1);

  /* decompress until an end-of-block code */
  retval = inflate_codes(__G__ tl, td, bl, bd);
  /* free the decoding tables, return */

static int inflate_block(__G__ e)
int *e;                 /* last block flag */
/* decompress an inflated block */
  unsigned t;           /* block type */
  register ulg b;       /* bit buffer */
  register unsigned k;  /* number of bits in bit buffer */
  int retval = 0;       /* error code returned: initialized to "no error" */

  /* make local bit buffer */

  /* read in last block bit */

  /* read in block type */

  /* restore the global bit buffer */

  /* inflate that block type */
  if (t == 2)
    return inflate_dynamic(__G);
  if (t == 0)
    return inflate_stored(__G);
  if (t == 1)
    return inflate_fixed(__G);

int inflate(__G__ is_defl64)
/* decompress an inflated entry */
  int e;                /* last block flag */
  int r;                /* result code */
  unsigned h = 0;       /* maximum struct huft's malloc'ed */
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
  if (G.redirect_slide)
    wsize = G.redirect_size, redirSlide = G.redirect_buffer;
  else
    wsize = WSIZE, redirSlide = slide;  /* how they're #defined if !DLL */
#endif
  /* initialize window, bit buffer */

#ifdef USE_DEFLATE64
    G.fixed_tl = G.fixed_tl64;
    G.fixed_bl = G.fixed_bl64;
    G.fixed_td = G.fixed_td64;
    G.fixed_bd = G.fixed_bd64;

    G.fixed_tl = G.fixed_tl32;
    G.fixed_bl = G.fixed_bl32;
    G.fixed_td = G.fixed_td32;
    G.fixed_bd = G.fixed_bd32;
#else /* !USE_DEFLATE64 */
    /* This should not happen unless UnZip is built from object files
     * compiled with inconsistent option setting.  Handle this by
     * returning with "bad input" error code.
     */
    Trace((stderr, "\nThis inflate() cannot handle Deflate64!\n"));
#endif /* ?USE_DEFLATE64 */
  /* decompress until the last block */
    if ((r = inflate_block(__G__ &e)) != 0)

  Trace((stderr, "\n%u bytes in Huffman tables (%u/entry)\n",
         h * (unsigned)sizeof(struct huft), (unsigned)sizeof(struct huft)));
    G.fixed_tl64 = G.fixed_tl;
    G.fixed_bl64 = G.fixed_bl;
    G.fixed_td64 = G.fixed_td;
    G.fixed_bd64 = G.fixed_bd;

    G.fixed_tl32 = G.fixed_tl;
    G.fixed_bl32 = G.fixed_bl;
    G.fixed_td32 = G.fixed_td;
    G.fixed_bd32 = G.fixed_bd;

  /* flush out redirSlide and return (success, unless final FLUSH failed) */
  return (FLUSH(G.wp));

int inflate_free(__G)
  if (G.fixed_tl != (struct huft *)NULL)
    huft_free(G.fixed_td);
    huft_free(G.fixed_tl);
    G.fixed_td = G.fixed_tl = (struct huft *)NULL;

#endif /* ?USE_ZLIB */
/*
 * GRR:  moved huft_build() and huft_free() down here; used by explode()
 *       and fUnZip regardless of whether USE_ZLIB defined or not
 */

/* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
#define BMAX 16         /* maximum bit length of any code (16 for explode) */
#define N_MAX 288       /* maximum number of codes in any set */
int huft_build(__G__ b, n, s, d, e, t, m)
ZCONST unsigned *b;     /* code lengths in bits (all assumed <= BMAX) */
unsigned n;             /* number of codes (assumed <= N_MAX) */
unsigned s;             /* number of simple-valued codes (0..s-1) */
ZCONST ush *d;          /* list of base values for non-simple codes */
ZCONST uch *e;          /* list of extra bits for non-simple codes */
struct huft **t;        /* result: starting table */
int *m;                 /* maximum lookup bits, returns actual */
/* Given a list of code lengths and a maximum table size, make a set of
   tables to decode that set of codes.  Return zero on success, one if
   the given code set is incomplete (the tables are still built in this
   case), two if the input is invalid (all zero length codes or an
   oversubscribed set of lengths), and three if not enough memory.
   The code with value 256 is special, and the tables are constructed
   so that no bits beyond that code are fetched when that code is
   decoded. */
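
/* Illustrative sketch (not part of the original source): a typical
   huft_build() call, modelled on inflate_fixed() above -- build a
   literal/length decoding table from 288 code lengths, check the return
   code described above, and release the table with huft_free() when done.

       unsigned l[288];        -- code lengths, filled in by the caller
       struct huft *tl;        -- resulting first-level table
       int bl = 7;             -- requested lookup bits; returns actual

       if (huft_build(__G__ l, 288, 257, cplens, cplext, &tl, &bl) == 0) {
           ...decode using tl and bl, then...
           huft_free(tl);
       }
 */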
  unsigned a;                   /* counter for codes of length k */
  unsigned c[BMAX+1];           /* bit length count table */
  unsigned el;                  /* length of EOB code (value 256) */
  unsigned f;                   /* i repeats in table every f entries */
  int g;                        /* maximum code length */
  int h;                        /* table level */
  register unsigned i;          /* counter, current code */
  register unsigned j;          /* counter */
  register int k;               /* number of bits in current code */
  int lx[BMAX+1];               /* memory for l[-1..BMAX-1] */
  int *l = lx+1;                /* stack of bits per table */
  register unsigned *p;         /* pointer into c[], b[], or v[] */
  register struct huft *q;      /* points to current table */
  struct huft r;                /* table entry for structure assignment */
  struct huft *u[BMAX];         /* table stack */
  unsigned v[N_MAX];            /* values in order of bit length */
  register int w;               /* bits before this table == (l * h) */
  unsigned x[BMAX+1];           /* bit offsets, then code stack */
  unsigned *xp;                 /* pointer into x */
  int y;                        /* number of dummy codes added */
  unsigned z;                   /* number of entries in current table */

  /* Generate counts for each bit length */
  el = n > 256 ? b[256] : BMAX; /* set length of EOB code, if any */
  memzero((char *)c, sizeof(c));
  p = (unsigned *)b;  i = n;
    c[*p]++; p++;               /* assume all entries <= BMAX */
  if (c[0] == n)                /* null input--all zero length codes */
    *t = (struct huft *)NULL;

  /* Find minimum and maximum length, bound *m by those */
  for (j = 1; j <= BMAX; j++)

  k = j;                        /* minimum code length */
  if ((unsigned)*m < j)
    *m = j;
  for (i = BMAX; i; i--)

  g = i;                        /* maximum code length */
  if ((unsigned)*m > i)
    *m = i;

  /* Adjust last length count to fill out codes, if needed */
  for (y = 1 << j; j < i; j++, y <<= 1)
    if ((y -= c[j]) < 0)
      return 2;                 /* bad input: more codes than bits */
  if ((y -= c[i]) < 0)

  /* Generate starting offsets into the value table for each length */
  p = c + 1;  xp = x + 2;
  while (--i) {                 /* note that i == g from above */
    *xp++ = (j += *p++);
  }

  /* Make a table of values in order of bit lengths */
  memzero((char *)v, sizeof(v));
  p = (unsigned *)b;  i = 0;
  if ((j = *p++) != 0)
    v[x[j]++] = i;

  n = x[g];                     /* set n to length of v */

  /* Generate the Huffman codes and for each, make the table entries */
  x[0] = i = 0;                 /* first Huffman code is zero */
  p = v;                        /* grab values in bit order */
  h = -1;                       /* no tables yet--level -1 */
  w = l[-1] = 0;                /* no bits decoded yet */
  u[0] = (struct huft *)NULL;   /* just to keep compilers happy */
  q = (struct huft *)NULL;      /* ditto */

  /* go through the bit lengths (k already is bits in shortest code) */
      /* here i is the Huffman code of length k bits for value *p */
      /* make tables up to required level */
      while (k > w + l[h])
        w += l[h++];            /* add bits already decoded */

        /* compute minimum size table less than or equal to *m bits */
        z = (z = g - w) > (unsigned)*m ? *m : z;        /* upper limit */
        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
        {                       /* too few codes for k-w bit table */
          f -= a + 1;           /* deduct codes from patterns left */
          while (++j < z)       /* try smaller tables up to z bits */
            if ((f <<= 1) <= *++xp)
              break;            /* enough codes to use up j bits */
            f -= *xp;           /* else deduct codes from patterns */

        if ((unsigned)w + j > el && (unsigned)w < el)
          j = el - w;           /* make EOB code end at table */
        z = 1 << j;             /* table entries for j-bit table */
        l[h] = j;               /* set table size in stack */

        /* allocate and link in new table */
        if ((q = (struct huft *)malloc((z + 1)*sizeof(struct huft))) ==
            (struct huft *)NULL)
          return 3;             /* not enough memory */

        G.hufts += z + 1;       /* track memory usage */
        *t = q + 1;             /* link to list for huft_free() */
        *(t = &(q->v.t)) = (struct huft *)NULL;
        u[h] = ++q;             /* table starts after link */

        /* connect to last table, if there is one */
          x[h] = i;             /* save pattern for backing up */
          r.b = (uch)l[h-1];    /* bits to dump before this table */
          r.e = (uch)(32 + j);  /* bits in this table */
          r.v.t = q;            /* pointer to this table */
          j = (i & ((1 << w) - 1)) >> (w - l[h-1]);
          u[h-1][j] = r;        /* connect to last table */

      /* set up table entry in r */
        r.e = INVALID_CODE;     /* out of values--invalid code */

        r.e = (uch)(*p < 256 ? 32 : 31);        /* 256 is end-of-block code */
        r.v.n = (ush)*p++;      /* simple code is just the value */

        r.e = e[*p - s];        /* non-simple--look up in lists */
        r.v.n = d[*p++ - s];

      /* fill code-like entries with r */
      for (j = i >> w; j < z; j += f)

      /* backwards increment the k-bit code i */
      for (j = 1 << (k - 1); i & j; j >>= 1)
        i ^= j;
      i ^= j;

      /* backup over finished tables */
      while ((i & ((1 << w) - 1)) != x[h])
        w -= l[--h];            /* don't need to update q */

  /* return actual size of base table */

  /* Return true (1) if we were given an incomplete table */
  return y != 0 && g != 1;

struct huft *t;         /* table to free */
/* Free the malloc'ed tables built by huft_build(), which makes a linked
   list of the tables it made, with the links in a dummy first entry of
   each table. */
  register struct huft *p, *q;

  /* Go through linked list, freeing from the malloced (t[-1]) address. */
  while (p != (struct huft *)NULL
)