/*
  Copyright (c) 1990-2002 Info-ZIP.  All rights reserved.

  See the accompanying file LICENSE, version 2000-Apr-09 or later
  (the contents of which are also included in unzip.h) for terms of use.
  If, for some reason, all these files are missing, the Info-ZIP license
  also may be found at:  ftp://ftp.info-zip.org/pub/infozip/license.html
*/
/* inflatef.c
   *** This is only needed to build funzip correctly and is a direct copy
   of inflate.c.  Don't forget to update here if you update inflate.c!!
   See inflate.c for more information ***
 */
#define PKZIP_BUG_WORKAROUND    /* PKZIP 1.93a problem--live with it */

#define FUNZIP
#define __INFLATE_C     /* identifies this source module */

/* #define DEBUG */
#define INFMOD          /* tell inflate.h to include code to be compiled */
#include "inflate.h"

/* marker for "unused" huft code, and corresponding check macro */
#define INVALID_CODE 99
#define IS_INVALID_CODE(c)  ((c) == INVALID_CODE)
#ifndef WSIZE               /* default is 32K resp. 64K */
#  ifdef USE_DEFLATE64
#    define WSIZE   65536L  /* window size--must be a power of two, and */
#  else                     /*  at least 64K for PKZip's deflate64 method */
#    define WSIZE   0x8000  /* window size--must be a power of two, and */
#  endif                    /*  at least 32K for zip's deflate method */
#endif

/* some buffer counters must be capable of holding 64k for Deflate64 */
#if (defined(USE_DEFLATE64) && defined(INT_16BIT))
#  define UINT_D64 ulg
#else
#  define UINT_D64 unsigned
#endif

#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
#  define wsize G._wsize    /* wsize is a variable */
#else
#  define wsize WSIZE       /* wsize is a constant */
#endif

#ifndef NEXTBYTE        /* default is to simply get a byte from stdin */
#  define NEXTBYTE getchar()
#endif

#ifndef MESSAGE   /* only used twice, for fixed strings--NOT general-purpose */
#  define MESSAGE(str, len, flag)  fprintf(stderr, (char *)(str))
#endif
#ifndef FLUSH           /* default is to simply write the buffer to stdout */
#  define FLUSH(n) \
    (((extent)fwrite(redirSlide, 1, (extent)(n), stdout) == (extent)(n)) ? \
     0 : PKDISK)
#endif
/* Warning: the fwrite above might not work on 16-bit compilers, since
   0x8000 might be interpreted as -32,768 by the library function.  When
   support for Deflate64 is enabled, the window size is 64K and the
   simple fwrite statement is definitely broken for 16-bit compilers. */
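/* Illustrative sketch only (not used here): one way to keep a 16-bit
   fwrite() size argument in range would be to flush in chunks well below
   32K.  flush_small_chunks() is a hypothetical name, not part of UnZip. */
#if 0
static int flush_small_chunks(ulg n)
{
    uch *p = redirSlide;

    while (n > 0) {
        extent chunk = (extent)(n > 0x4000 ? 0x4000 : n);

        if (fwrite(p, 1, chunk, stdout) != chunk)
            return PKDISK;          /* same error convention as FLUSH() */
        p += chunk;
        n -= (ulg)chunk;
    }
    return 0;
}
#endif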
#ifndef Trace
#  ifdef DEBUG
#    define Trace(x) fprintf x
#  else
#    define Trace(x)
#  endif
#endif


/*---------------------------------------------------------------------------*/
#ifdef USE_ZLIB

/*
   GRR:  return values for both original inflate() and UZinflate()
           0  OK
           1  incomplete table(?)
           2  bad input
           3  not enough memory
 */

/**************************/
/*  Function UZinflate()  */
/**************************/
int UZinflate(__G__ is_defl64)
    __GDEF
    int is_defl64;
/* decompress an inflated entry using the zlib routines */
{
    int retval = 0;     /* return code: 0 = "no error" */
    int err = Z_OK;

#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
    if (G.redirect_slide)
        wsize = G.redirect_size, redirSlide = G.redirect_buffer;
    else
        wsize = WSIZE, redirSlide = slide;
#endif

    G.dstrm.next_out = redirSlide;
    G.dstrm.avail_out = wsize;

    G.dstrm.next_in = G.inptr;
    G.dstrm.avail_in = G.incnt;

    if (!G.inflInit) {
        unsigned i;
        int windowBits;

        /* only need to test this stuff once */
        if (zlib_version[0] != ZLIB_VERSION[0]) {
            Info(slide, 0x21, ((char *)slide,
              "error: incompatible zlib version (expected %s, found %s)\n",
              ZLIB_VERSION, zlib_version));
            return 3;
        } else if (strcmp(zlib_version, ZLIB_VERSION) != 0)
            Info(slide, 0x21, ((char *)slide,
              "warning: different zlib version (expected %s, using %s)\n",
              ZLIB_VERSION, zlib_version));

        /* windowBits = log2(wsize) */
        for (i = (unsigned)wsize, windowBits = 0;
             !(i & 1);  i >>= 1, ++windowBits);
        if ((unsigned)windowBits > (unsigned)15)
            windowBits = 15;
        else if (windowBits < 8)
            windowBits = 8;

        G.dstrm.zalloc = (alloc_func)Z_NULL;
        G.dstrm.zfree = (free_func)Z_NULL;

        Trace((stderr, "initializing inflate()\n"));
        err = inflateInit2(&G.dstrm, -windowBits);

        if (err == Z_MEM_ERROR)
            return 3;
        else if (err != Z_OK)
            Trace((stderr, "oops! (inflateInit2() err = %d)\n", err));
        G.inflInit = 1;
    }
#ifdef FUNZIP
    while (err != Z_STREAM_END) {
#else /* !FUNZIP */
    while (G.csize > 0) {
        Trace((stderr, "first loop: G.csize = %ld\n", G.csize));
#endif /* ?FUNZIP */
        while (G.dstrm.avail_out > 0) {
            err = inflate(&G.dstrm, Z_PARTIAL_FLUSH);

            if (err == Z_DATA_ERROR) {
                retval = 2; goto uzinflate_cleanup_exit;
            } else if (err == Z_MEM_ERROR) {
                retval = 3; goto uzinflate_cleanup_exit;
            } else if (err != Z_OK && err != Z_STREAM_END)
                Trace((stderr, "oops! (inflate(first loop) err = %d)\n", err));

#ifdef FUNZIP
            if (err == Z_STREAM_END)    /* "END-of-entry-condition" ? */
#else /* !FUNZIP */
            if (G.csize <= 0L)          /* "END-of-entry-condition" ? */
#endif /* ?FUNZIP */
                break;

            if (G.dstrm.avail_in <= 0) {
                if (fillinbuf(__G) == 0) {
                    /* no "END-condition" yet, but no more data */
                    retval = 2; goto uzinflate_cleanup_exit;
                }

                G.dstrm.next_in = G.inptr;
                G.dstrm.avail_in = G.incnt;
            }
            Trace((stderr, "  avail_in = %d\n", G.dstrm.avail_in));
        }

        /* flush slide[] */
        if ((retval = FLUSH(wsize - G.dstrm.avail_out)) != 0)
            goto uzinflate_cleanup_exit;
        Trace((stderr, "inside loop: flushing %ld bytes (ptr diff = %ld)\n",
          (long)(wsize - G.dstrm.avail_out),
          (long)(G.dstrm.next_out-(Bytef *)redirSlide)));
        G.dstrm.next_out = redirSlide;
        G.dstrm.avail_out = wsize;
    }

    /* no more input, so loop until we have all output */
    Trace((stderr, "beginning final loop: err = %d\n", err));
    while (err != Z_STREAM_END) {
        err = inflate(&G.dstrm, Z_PARTIAL_FLUSH);
        if (err == Z_DATA_ERROR) {
            retval = 2; goto uzinflate_cleanup_exit;
        } else if (err == Z_MEM_ERROR) {
            retval = 3; goto uzinflate_cleanup_exit;
        } else if (err == Z_BUF_ERROR) {                /* DEBUG */
            Trace((stderr,
                   "zlib inflate() did not detect stream end (%s, %s)\n",
                   G.zipfn, G.filename));
            break;
        } else if (err != Z_OK && err != Z_STREAM_END) {
            Trace((stderr, "oops! (inflate(final loop) err = %d)\n", err));
            DESTROYGLOBALS();
            EXIT(PK_MEM3);
        }
        /* final flush of slide[] */
        if ((retval = FLUSH(wsize - G.dstrm.avail_out)) != 0)
            goto uzinflate_cleanup_exit;
        Trace((stderr, "final loop: flushing %ld bytes (ptr diff = %ld)\n",
          (long)(wsize - G.dstrm.avail_out),
          (long)(G.dstrm.next_out-(Bytef *)redirSlide)));
        G.dstrm.next_out = redirSlide;
        G.dstrm.avail_out = wsize;
    }

    Trace((stderr, "total in = %ld, total out = %ld\n", G.dstrm.total_in,
      G.dstrm.total_out));

    G.inptr = (uch *)G.dstrm.next_in;
    G.incnt = (G.inbuf + INBUFSIZ) - G.inptr;  /* reset for other routines */

uzinflate_cleanup_exit:
    err = inflateReset(&G.dstrm);
    if (err != Z_OK)
        Trace((stderr, "oops! (inflateReset() err = %d)\n", err));

    return retval;
}


/*---------------------------------------------------------------------------*/
#else /* !USE_ZLIB */

/* Function prototypes */
#ifndef OF
#  ifdef __STDC__
#    define OF(a) a
#  else
#    define OF(a) ()
#  endif
#endif /* !OF */
int inflate_codes OF((__GPRO__ struct huft *tl, struct huft *td,
                      int bl, int bd));
static int inflate_stored OF((__GPRO));
static int inflate_fixed OF((__GPRO));
static int inflate_dynamic OF((__GPRO));
static int inflate_block OF((__GPRO__ int *e));


/* The inflate algorithm uses a sliding 32K byte window on the uncompressed
   stream to find repeated byte strings.  This is implemented here as a
   circular buffer.  The index is updated simply by incrementing and then
   and'ing with 0x7fff (32K-1). */
/* It is left to other modules to supply the 32K area.  It is assumed
   to be usable as if it were declared "uch slide[32768];" or as just
   "uch *slide;" and then malloc'ed in the latter case.  The definition
   must be in unzip.h, included above. */
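/* Illustrative sketch only: with a power-of-two window size, the circular
   index update described above reduces to a mask (shown for both the 32K
   and 64K cases, since WSIZE - 1 works for either). */
#if 0
    slide[w] = (uch)c;              /* store next uncompressed byte */
    w = (w + 1) & (WSIZE - 1);      /* wrap index at the window size */
#endif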

/* unsigned wp;  moved to globals.h */  /* current position in slide */

/* Tables for deflate from PKZIP's appnote.txt. */
/* - Order of the bit length code lengths */
static ZCONST unsigned border[] = {
    16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};

/* - Copy lengths for literal codes 257..285 */
#ifdef USE_DEFLATE64
static ZCONST ush cplens64[] = {
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 3, 0, 0};
/* For Deflate64, the code 285 is defined differently. */
#else
#  define cplens32 cplens
#endif
static ZCONST ush cplens32[] = {
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
        /* note: see note #13 above about the 258 in this list. */

/* - Extra bits for literal codes 257..285 */
#ifdef USE_DEFLATE64
static ZCONST uch cplext64[] = {
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 16, INVALID_CODE, INVALID_CODE};
#else
#  define cplext32 cplext
#endif
static ZCONST uch cplext32[] = {
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, INVALID_CODE, INVALID_CODE};

/* - Copy offsets for distance codes 0..29 (0..31 for Deflate64) */
static ZCONST ush cpdist[] = {
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
#if (defined(USE_DEFLATE64) || defined(PKZIP_BUG_WORKAROUND))
        8193, 12289, 16385, 24577, 32769, 49153};
#else
        8193, 12289, 16385, 24577};
#endif

/* - Extra bits for distance codes 0..29 (0..31 for Deflate64) */
#ifdef USE_DEFLATE64
static ZCONST uch cpdext64[] = {
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
        12, 12, 13, 13, 14, 14};
#else
#  define cpdext32 cpdext
#endif
static ZCONST uch cpdext32[] = {
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
#ifdef PKZIP_BUG_WORKAROUND
        12, 12, 13, 13, INVALID_CODE, INVALID_CODE};
#else
        12, 12, 13, 13};
#endif

#ifdef PKZIP_BUG_WORKAROUND
#  define MAXLITLENS 288
#else
#  define MAXLITLENS 286
#endif
#if (defined(USE_DEFLATE64) || defined(PKZIP_BUG_WORKAROUND))
#  define MAXDISTS 32
#else
#  define MAXDISTS 30
#endif


/* moved to consts.h (included in unzip.c), resp. funzip.c */
#if 0
/* And'ing with mask_bits[n] masks the lower n bits */
ZCONST ush near mask_bits[] = {
    0x0000,
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};
#endif /* 0 */


/* Macros for inflate() bit peeking and grabbing.
   The usage is:

        NEEDBITS(j)
        x = b & mask_bits[j];
        DUMPBITS(j)

   where NEEDBITS makes sure that b has at least j bits in it, and
   DUMPBITS removes the bits from b.  The macros use the variable k
   for the number of bits in b.  Normally, b and k are register
   variables for speed and are initialized at the beginning of a
   routine that uses these macros from a global bit buffer and count.

   In order to not ask for more bits than there are in the compressed
   stream, the Huffman tables are constructed to only ask for just
   enough bits to make up the end-of-block code (value 256).  Then no
   bytes need to be "returned" to the buffer at the end of the last
   block.  See the huft_build() routine.
 */

/* These have been moved to globals.h */
#if 0
ulg bb;                         /* bit buffer */
unsigned bk;                    /* bits in bit buffer */
#endif

#ifndef CHECK_EOF
#  define CHECK_EOF             /* default as of 5.13/5.2 */
#endif

#ifndef CHECK_EOF
#  define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE)<<k;k+=8;}}
#else
#  define NEEDBITS(n) {while(k<(n)){int c=NEXTBYTE;\
    if(c==EOF){retval=1;goto cleanup_and_exit;}\
    b|=((ulg)c)<<k;k+=8;}}
#endif

#define DUMPBITS(n) {b>>=(n);k-=(n);}
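/* Usage sketch, illustrative only: pull a 5-bit field from the bit buffer,
   assuming locals b, k, retval and a cleanup_and_exit label set up as in
   the routines below ("field" is a hypothetical variable). */
#if 0
    NEEDBITS(5)
    field = (unsigned)b & mask_bits[5];
    DUMPBITS(5)
#endif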


/*
   Huffman code decoding is performed using a multi-level table lookup.
   The fastest way to decode is to simply build a lookup table whose
   size is determined by the longest code.  However, the time it takes
   to build this table can also be a factor if the data being decoded
   are not very long.  The most common codes are necessarily the
   shortest codes, so those codes dominate the decoding time, and hence
   the speed.  The idea is you can have a shorter table that decodes the
   shorter, more probable codes, and then point to subsidiary tables for
   the longer codes.  The time it costs to decode the longer codes is
   then traded against the time it takes to make longer tables.

   The results of this trade are in the variables lbits and dbits
   below.  lbits is the number of bits the first level table for literal/
   length codes can decode in one step, and dbits is the same thing for
   the distance codes.  Subsequent tables are also less than or equal to
   those sizes.  These values may be adjusted either when all of the
   codes are shorter than that, in which case the longest code length in
   bits is used, or when the shortest code is *longer* than the requested
   table size, in which case the length of the shortest code in bits is
   used.

   There are two different values for the two tables, since they code a
   different number of possibilities each.  The literal/length table
   codes 286 possible values, or in a flat code, a little over eight
   bits.  The distance table codes 30 possible values, or a little less
   than five bits, flat.  The optimum values for speed end up being
   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
   The optimum values may differ though from machine to machine, and
   possibly even between compilers.  Your mileage may vary.
 */


static ZCONST int lbits = 9;    /* bits in base literal/length lookup table */
static ZCONST int dbits = 6;    /* bits in base distance lookup table */
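/* For example, with lbits == 9 a literal/length code of nine or fewer bits
   is decoded in a single first-level lookup, while a 13-bit code costs one
   further lookup in a (13-9)-bit subsidiary table. */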

#ifndef ASM_INFLATECODES

int inflate_codes(__G__ tl, td, bl, bd)
     __GDEF
     struct huft *tl, *td;  /* literal/length and distance decoder tables */
     int bl, bd;            /* number of bits decoded by tl[] and td[] */
/* inflate (decompress) the codes in a deflated (compressed) block.
   Return an error code or zero if it all goes ok. */
{
    register unsigned e;    /* table entry flag/number of extra bits */
    unsigned d;             /* index for copy */
    UINT_D64 n;             /* length for copy (deflate64: might be 64k+2) */
    UINT_D64 w;             /* current window position (deflate64: up to 64k) */
    struct huft *t;         /* pointer to table entry */
    unsigned ml, md;        /* masks for bl and bd bits */
    register ulg b;         /* bit buffer */
    register unsigned k;    /* number of bits in bit buffer */
    int retval = 0;         /* error code returned: initialized to "no error" */

    /* make local copies of globals */
    b = G.bb;                       /* initialize bit buffer */
    k = G.bk;
    w = G.wp;                       /* initialize window position */

    /* inflate the coded data */
    ml = mask_bits[bl];             /* precompute masks for speed */
    md = mask_bits[bd];
    while (1)                       /* do until end of block */
    {
        NEEDBITS((unsigned)bl)
        t = tl + ((unsigned)b & ml);
        while (1) {
            DUMPBITS(t->b)

            if ((e = t->e) == 32)   /* then it's a literal */
            {
                redirSlide[w++] = (uch)t->v.n;
                if (w == wsize)
                {
                    if ((retval = FLUSH(w)) != 0) goto cleanup_and_exit;
                    w = 0;
                }
                break;
            }

            if (e < 31)             /* then it's a length */
            {
                /* get length of block to copy */
                NEEDBITS(e)
                n = t->v.n + ((unsigned)b & mask_bits[e]);
                DUMPBITS(e)

                /* decode distance of block to copy */
                NEEDBITS((unsigned)bd)
                t = td + ((unsigned)b & md);
                while (1) {
                    DUMPBITS(t->b)
                    if ((e = t->e) < 32)
                        break;
                    if (IS_INVALID_CODE(e))
                        return 1;
                    e &= 31;
                    NEEDBITS(e)
                    t = t->v.t + ((unsigned)b & mask_bits[e]);
                }
                NEEDBITS(e)
                d = (unsigned)w - t->v.n - ((unsigned)b & mask_bits[e]);
                DUMPBITS(e)

                /* do the copy */
                do {
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
                    if (G.redirect_slide) {
                        /* &= w/ wsize unnecessary & wrong if redirect */
                        if ((UINT_D64)d >= wsize)
                            return 1;   /* invalid compressed data */
                        e = (unsigned)(wsize - (d > (unsigned)w ? (UINT_D64)d : w));
                    }
                    else
#endif
                        e = (unsigned)(wsize -
                                       ((d &= (unsigned)(wsize-1)) > (unsigned)w ?
                                        (UINT_D64)d : w));
                    if ((UINT_D64)e > n) e = (unsigned)n;
                    n -= e;
#ifndef NOMEMCPY
                    if ((unsigned)w - d >= e)
                        /* (this test assumes unsigned comparison) */
                    {
                        memcpy(redirSlide + (unsigned)w, redirSlide + d, e);
                        w += e;
                        d += e;
                    }
                    else            /* do it slowly to avoid memcpy() overlap */
#endif /* !NOMEMCPY */
                        do {
                            redirSlide[w++] = redirSlide[d++];
                        } while (--e);
                    if (w == wsize)
                    {
                        if ((retval = FLUSH(w)) != 0) goto cleanup_and_exit;
                        w = 0;
                    }
                } while (n);
                break;
            }

            if (e == 31)            /* it's the EOB signal */
            {
                /* sorry for this goto, but we have to exit two loops at once */
                goto cleanup_decode;
            }

            if (IS_INVALID_CODE(e))
                return 1;

            e &= 31;
            NEEDBITS(e)
            t = t->v.t + ((unsigned)b & mask_bits[e]);
        }
    }
cleanup_decode:

    /* restore the globals from the locals */
    G.wp = (unsigned)w;             /* restore global window pointer */
    G.bb = b;                       /* restore global bit buffer */
    G.bk = k;

cleanup_and_exit:
    /* done */
    return retval;
}

#endif /* ASM_INFLATECODES */



static int inflate_stored(__G)
     __GDEF
/* "decompress" an inflated type 0 (stored) block. */
{
    UINT_D64 w;             /* current window position (deflate64: up to 64k!) */
    unsigned n;             /* number of bytes in block */
    register ulg b;         /* bit buffer */
    register unsigned k;    /* number of bits in bit buffer */
    int retval = 0;         /* error code returned: initialized to "no error" */

    /* make local copies of globals */
    Trace((stderr, "\nstored block"));
    b = G.bb;                       /* initialize bit buffer */
    k = G.bk;
    w = G.wp;                       /* initialize window position */

    /* go to byte boundary */
    n = k & 7;
    DUMPBITS(n);

    /* get the length and its complement */
    NEEDBITS(16)
    n = ((unsigned)b & 0xffff);
    DUMPBITS(16)
    NEEDBITS(16)
    if (n != (unsigned)((~b) & 0xffff))
        return 1;                   /* error in compressed data */
    DUMPBITS(16)

    /* read and output the compressed data */
    while (n--)
    {
        NEEDBITS(8)
        redirSlide[w++] = (uch)b;
        if (w == wsize)
        {
            if ((retval = FLUSH(w)) != 0) goto cleanup_and_exit;
            w = 0;
        }
        DUMPBITS(8)
    }

    /* restore the globals from the locals */
    G.wp = (unsigned)w;             /* restore global window pointer */
    G.bb = b;                       /* restore global bit buffer */
    G.bk = k;

cleanup_and_exit:
    return retval;
}



/* Globals for literal tables (built once) */
/* Moved to globals.h */
#if 0
struct huft *fixed_tl = (struct huft *)NULL;
struct huft *fixed_td;
int fixed_bl, fixed_bd;
#endif

static int inflate_fixed(__G)
     __GDEF
/* decompress an inflated type 1 (fixed Huffman codes) block.  We should
   either replace this with a custom decoder, or at least precompute the
   Huffman tables. */
{
    /* if first time, set up tables for fixed blocks */
    Trace((stderr, "\nliteral block"));
    if (G.fixed_tl == (struct huft *)NULL)
    {
        int i;                  /* temporary variable */
        unsigned l[288];        /* length list for huft_build */

        /* literal table */
        for (i = 0; i < 144; i++)
            l[i] = 8;
        for (; i < 256; i++)
            l[i] = 9;
        for (; i < 280; i++)
            l[i] = 7;
        for (; i < 288; i++)    /* make a complete, but wrong code set */
            l[i] = 8;
        G.fixed_bl = 7;
#ifdef USE_DEFLATE64
        if ((i = huft_build(__G__ l, 288, 257, G.cplens, G.cplext,
                            &G.fixed_tl, &G.fixed_bl)) != 0)
#else
        if ((i = huft_build(__G__ l, 288, 257, cplens, cplext,
                            &G.fixed_tl, &G.fixed_bl)) != 0)
#endif
        {
            G.fixed_tl = (struct huft *)NULL;
            return i;
        }

        /* distance table */
        for (i = 0; i < MAXDISTS; i++)  /* make an incomplete code set */
            l[i] = 5;
        G.fixed_bd = 5;
#ifdef USE_DEFLATE64
        if ((i = huft_build(__G__ l, MAXDISTS, 0, cpdist, G.cpdext,
                            &G.fixed_td, &G.fixed_bd)) > 1)
#else
        if ((i = huft_build(__G__ l, MAXDISTS, 0, cpdist, cpdext,
                            &G.fixed_td, &G.fixed_bd)) > 1)
#endif
        {
            huft_free(G.fixed_tl);
            G.fixed_td = G.fixed_tl = (struct huft *)NULL;
            return i;
        }
    }

    /* decompress until an end-of-block code */
    return inflate_codes(__G__ G.fixed_tl, G.fixed_td,
                         G.fixed_bl, G.fixed_bd);
}



static int inflate_dynamic(__G)
     __GDEF
/* decompress an inflated type 2 (dynamic Huffman codes) block. */
{
    int i;                  /* temporary variables */
    unsigned j;
    unsigned l;             /* last length */
    unsigned m;             /* mask for bit lengths table */
    unsigned n;             /* number of lengths to get */
    struct huft *tl;        /* literal/length code table */
    struct huft *td;        /* distance code table */
    int bl;                 /* lookup bits for tl */
    int bd;                 /* lookup bits for td */
    unsigned nb;            /* number of bit length codes */
    unsigned nl;            /* number of literal/length codes */
    unsigned nd;            /* number of distance codes */
    unsigned ll[MAXLITLENS+MAXDISTS]; /* lit./length and distance code lengths */
    register ulg b;         /* bit buffer */
    register unsigned k;    /* number of bits in bit buffer */
    int retval = 0;         /* error code returned: initialized to "no error" */

    /* make local bit buffer */
    Trace((stderr, "\ndynamic block"));
    b = G.bb;
    k = G.bk;

    /* read in table lengths */
    NEEDBITS(5)
    nl = 257 + ((unsigned)b & 0x1f);    /* number of literal/length codes */
    DUMPBITS(5)
    NEEDBITS(5)
    nd = 1 + ((unsigned)b & 0x1f);      /* number of distance codes */
    DUMPBITS(5)
    NEEDBITS(4)
    nb = 4 + ((unsigned)b & 0xf);       /* number of bit length codes */
    DUMPBITS(4)
    if (nl > MAXLITLENS || nd > MAXDISTS)
        return 1;                       /* bad lengths */

    /* read in bit-length-code lengths */
    for (j = 0; j < nb; j++)
    {
        NEEDBITS(3)
        ll[border[j]] = (unsigned)b & 7;
        DUMPBITS(3)
    }
    for (; j < 19; j++)
        ll[border[j]] = 0;

    /* build decoding table for trees--single level, 7 bit lookup */
    bl = 7;
    retval = huft_build(__G__ ll, 19, 19, NULL, NULL, &tl, &bl);
    if (bl == 0)                        /* no bit lengths */
        retval = 1;
    if (retval)
    {
        if (retval == 1)
            huft_free(tl);
        return retval;                  /* incomplete code set */
    }

    /* read in literal and distance code lengths */
    n = nl + nd;
    m = mask_bits[bl];
    i = l = 0;
    while ((unsigned)i < n)
    {
        NEEDBITS((unsigned)bl)
        j = (td = tl + ((unsigned)b & m))->b;
        DUMPBITS(j)
        j = td->v.n;
        if (j < 16)                 /* length of code in bits (0..15) */
            ll[i++] = l = j;        /* save last length in l */
        else if (j == 16)           /* repeat last length 3 to 6 times */
        {
            NEEDBITS(2)
            j = 3 + ((unsigned)b & 3);
            DUMPBITS(2)
            if ((unsigned)i + j > n)
                return 1;
            while (j--)
                ll[i++] = l;
        }
        else if (j == 17)           /* 3 to 10 zero length codes */
        {
            NEEDBITS(3)
            j = 3 + ((unsigned)b & 7);
            DUMPBITS(3)
            if ((unsigned)i + j > n)
                return 1;
            while (j--)
                ll[i++] = 0;
            l = 0;
        }
        else                        /* j == 18: 11 to 138 zero length codes */
        {
            NEEDBITS(7)
            j = 11 + ((unsigned)b & 0x7f);
            DUMPBITS(7)
            if ((unsigned)i + j > n)
                return 1;
            while (j--)
                ll[i++] = 0;
            l = 0;
        }
    }

    /* free decoding table for trees */
    huft_free(tl);

    /* restore the global bit buffer */
    G.bb = b;
    G.bk = k;

    /* build the decoding tables for literal/length and distance codes */
    bl = lbits;
#ifdef USE_DEFLATE64
    retval = huft_build(__G__ ll, nl, 257, G.cplens, G.cplext, &tl, &bl);
#else
    retval = huft_build(__G__ ll, nl, 257, cplens, cplext, &tl, &bl);
#endif
    if (bl == 0)                        /* no literals or lengths */
        retval = 1;
    if (retval)
    {
        if (retval == 1) {
            if (!uO.qflag)
                MESSAGE((uch *)"(incomplete l-tree)  ", 21L, 1);
            huft_free(tl);
        }
        return retval;                  /* incomplete code set */
    }
    bd = dbits;
#ifdef USE_DEFLATE64
    retval = huft_build(__G__ ll + nl, nd, 0, cpdist, G.cpdext, &td, &bd);
#else
    retval = huft_build(__G__ ll + nl, nd, 0, cpdist, cpdext, &td, &bd);
#endif
#ifdef PKZIP_BUG_WORKAROUND
    if (retval == 1)
        retval = 0;
#endif
    if (bd == 0 && nl > 257)            /* lengths but no distances */
        retval = 1;
    if (retval)
    {
        if (retval == 1) {
            if (!uO.qflag)
                MESSAGE((uch *)"(incomplete d-tree)  ", 21L, 1);
            huft_free(td);
        }
        huft_free(tl);
        return retval;
    }

    /* decompress until an end-of-block code */
    retval = inflate_codes(__G__ tl, td, bl, bd);

cleanup_and_exit:
    /* free the decoding tables, return */
    huft_free(tl);
    huft_free(td);
    return retval;
}



static int inflate_block(__G__ e)
     __GDEF
     int *e;                /* last block flag */
/* decompress an inflated block */
{
    unsigned t;             /* block type */
    register ulg b;         /* bit buffer */
    register unsigned k;    /* number of bits in bit buffer */
    int retval = 0;         /* error code returned: initialized to "no error" */

    /* make local bit buffer */
    b = G.bb;
    k = G.bk;

    /* read in last block bit */
    NEEDBITS(1)
    *e = (int)b & 1;
    DUMPBITS(1)

    /* read in block type */
    NEEDBITS(2)
    t = (unsigned)b & 3;
    DUMPBITS(2)

    /* restore the global bit buffer */
    G.bb = b;
    G.bk = k;

    /* inflate that block type */
    if (t == 2)
        return inflate_dynamic(__G);
    if (t == 0)
        return inflate_stored(__G);
    if (t == 1)
        return inflate_fixed(__G);

    /* bad block type */
    retval = 2;

cleanup_and_exit:
    return retval;
}



int inflate(__G__ is_defl64)
    __GDEF
    int is_defl64;
/* decompress an inflated entry */
{
    int e;                  /* last block flag */
    int r;                  /* result code */
#ifdef DEBUG
    unsigned h = 0;         /* maximum struct huft's malloc'ed */
#endif

#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
    if (G.redirect_slide)
        wsize = G.redirect_size, redirSlide = G.redirect_buffer;
    else
        wsize = WSIZE, redirSlide = slide;  /* how they're #defined if !DLL */
#endif

    /* initialize window, bit buffer */
    G.wp = 0;
    G.bk = 0;
    G.bb = 0;

#ifdef USE_DEFLATE64
    if (is_defl64) {
        G.cplens = cplens64;
        G.cplext = cplext64;
        G.cpdext = cpdext64;
        G.fixed_tl = G.fixed_tl64;
        G.fixed_bl = G.fixed_bl64;
        G.fixed_td = G.fixed_td64;
        G.fixed_bd = G.fixed_bd64;
    } else {
        G.cplens = cplens32;
        G.cplext = cplext32;
        G.cpdext = cpdext32;
        G.fixed_tl = G.fixed_tl32;
        G.fixed_bl = G.fixed_bl32;
        G.fixed_td = G.fixed_td32;
        G.fixed_bd = G.fixed_bd32;
    }
#else /* !USE_DEFLATE64 */
    if (is_defl64) {
        /* This should not happen unless UnZip is built from object files
         * compiled with inconsistent option setting.  Handle this by
         * returning with "bad input" error code.
         */
        Trace((stderr, "\nThis inflate() cannot handle Deflate64!\n"));
        return 2;
    }
#endif /* ?USE_DEFLATE64 */

    /* decompress until the last block */
    do {
#ifdef DEBUG
        G.hufts = 0;
#endif
        if ((r = inflate_block(__G__ &e)) != 0)
            return r;
#ifdef DEBUG
        if (G.hufts > h)
            h = G.hufts;
#endif
    } while (!e);

    Trace((stderr, "\n%u bytes in Huffman tables (%u/entry)\n",
           h * (unsigned)sizeof(struct huft), (unsigned)sizeof(struct huft)));

#ifdef USE_DEFLATE64
    if (is_defl64) {
        G.fixed_tl64 = G.fixed_tl;
        G.fixed_bl64 = G.fixed_bl;
        G.fixed_td64 = G.fixed_td;
        G.fixed_bd64 = G.fixed_bd;
    } else {
        G.fixed_tl32 = G.fixed_tl;
        G.fixed_bl32 = G.fixed_bl;
        G.fixed_td32 = G.fixed_td;
        G.fixed_bd32 = G.fixed_bd;
    }
#endif

    /* flush out redirSlide and return (success, unless final FLUSH failed) */
    return (FLUSH(G.wp));
}



int inflate_free(__G)
    __GDEF
{
    if (G.fixed_tl != (struct huft *)NULL)
    {
        huft_free(G.fixed_td);
        huft_free(G.fixed_tl);
        G.fixed_td = G.fixed_tl = (struct huft *)NULL;
    }
    return 0;
}

#endif /* ?USE_ZLIB */


/*
 * GRR:  moved huft_build() and huft_free() down here; used by explode()
 *       and fUnZip regardless of whether USE_ZLIB defined or not
 */


/* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
#define BMAX 16         /* maximum bit length of any code (16 for explode) */
#define N_MAX 288       /* maximum number of codes in any set */


int huft_build(__G__ b, n, s, d, e, t, m)
     __GDEF
     ZCONST unsigned *b;    /* code lengths in bits (all assumed <= BMAX) */
     unsigned n;            /* number of codes (assumed <= N_MAX) */
     unsigned s;            /* number of simple-valued codes (0..s-1) */
     ZCONST ush *d;         /* list of base values for non-simple codes */
     ZCONST uch *e;         /* list of extra bits for non-simple codes */
     struct huft **t;       /* result: starting table */
     int *m;                /* maximum lookup bits, returns actual */
/* Given a list of code lengths and a maximum table size, make a set of
   tables to decode that set of codes.  Return zero on success, one if
   the given code set is incomplete (the tables are still built in this
   case), two if the input is invalid (all zero length codes or an
   oversubscribed set of lengths), and three if not enough memory.
   The code with value 256 is special, and the tables are constructed
   so that no bits beyond that code are fetched when that code is
   decoded. */
{
    unsigned a;             /* counter for codes of length k */
    unsigned c[BMAX+1];     /* bit length count table */
    unsigned el;            /* length of EOB code (value 256) */
    unsigned f;             /* i repeats in table every f entries */
    int g;                  /* maximum code length */
    int h;                  /* table level */
    register unsigned i;    /* counter, current code */
    register unsigned j;    /* counter */
    register int k;         /* number of bits in current code */
    int lx[BMAX+1];         /* memory for l[-1..BMAX-1] */
    int *l = lx+1;          /* stack of bits per table */
    register unsigned *p;   /* pointer into c[], b[], or v[] */
    register struct huft *q;    /* points to current table */
    struct huft r;          /* table entry for structure assignment */
    struct huft *u[BMAX];   /* table stack */
    unsigned v[N_MAX];      /* values in order of bit length */
    register int w;         /* bits before this table == (l * h) */
    unsigned x[BMAX+1];     /* bit offsets, then code stack */
    unsigned *xp;           /* pointer into x */
    int y;                  /* number of dummy codes added */
    unsigned z;             /* number of entries in current table */

    /* Generate counts for each bit length */
    el = n > 256 ? b[256] : BMAX;   /* set length of EOB code, if any */
    memzero((char *)c, sizeof(c));
    p = (unsigned *)b;  i = n;
    do {
        c[*p]++; p++;               /* assume all entries <= BMAX */
    } while (--i);
    if (c[0] == n)                  /* null input--all zero length codes */
    {
        *t = (struct huft *)NULL;
        *m = 0;
        return 0;
    }

    /* Find minimum and maximum length, bound *m by those */
    for (j = 1; j <= BMAX; j++)
        if (c[j])
            break;
    k = j;                          /* minimum code length */
    if ((unsigned)*m < j)
        *m = j;
    for (i = BMAX; i; i--)
        if (c[i])
            break;
    g = i;                          /* maximum code length */
    if ((unsigned)*m > i)
        *m = i;

    /* Adjust last length count to fill out codes, if needed */
    for (y = 1 << j; j < i; j++, y <<= 1)
        if ((y -= c[j]) < 0)
            return 2;               /* bad input: more codes than bits */
    if ((y -= c[i]) < 0)
        return 2;
    c[i] += y;

    /* Generate starting offsets into the value table for each length */
    x[1] = j = 0;
    p = c + 1;  xp = x + 2;
    while (--i) {                   /* note that i == g from above */
        *xp++ = (j += *p++);
    }

    /* Make a table of values in order of bit lengths */
    memzero((char *)v, sizeof(v));
    p = (unsigned *)b;  i = 0;
    do {
        if ((j = *p++) != 0)
            v[x[j]++] = i;
    } while (++i < n);
    n = x[g];                       /* set n to length of v */

    /* Generate the Huffman codes and for each, make the table entries */
    x[0] = i = 0;                   /* first Huffman code is zero */
    p = v;                          /* grab values in bit order */
    h = -1;                         /* no tables yet--level -1 */
    w = l[-1] = 0;                  /* no bits decoded yet */
    u[0] = (struct huft *)NULL;     /* just to keep compilers happy */
    q = (struct huft *)NULL;        /* ditto */
    z = 0;                          /* ditto */

    /* go through the bit lengths (k already is bits in shortest code) */
    for (; k <= g; k++)
    {
        a = c[k];
        while (a--)
        {
            /* here i is the Huffman code of length k bits for value *p */
            /* make tables up to required level */
            while (k > w + l[h])
            {
                w += l[h++];            /* add bits already decoded */

                /* compute minimum size table less than or equal to *m bits */
                z = (z = g - w) > (unsigned)*m ? *m : z;    /* upper limit */
                if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
                {                       /* too few codes for k-w bit table */
                    f -= a + 1;         /* deduct codes from patterns left */
                    xp = c + k;
                    while (++j < z)     /* try smaller tables up to z bits */
                    {
                        if ((f <<= 1) <= *++xp)
                            break;      /* enough codes to use up j bits */
                        f -= *xp;       /* else deduct codes from patterns */
                    }
                }
                if ((unsigned)w + j > el && (unsigned)w < el)
                    j = el - w;         /* make EOB code end at table */
                z = 1 << j;             /* table entries for j-bit table */
                l[h] = j;               /* set table size in stack */

                /* allocate and link in new table */
                if ((q = (struct huft *)malloc((z + 1)*sizeof(struct huft))) ==
                    (struct huft *)NULL)
                {
                    if (h)
                        huft_free(u[0]);
                    return 3;           /* not enough memory */
                }
#ifdef DEBUG
                G.hufts += z + 1;       /* track memory usage */
#endif
                *t = q + 1;             /* link to list for huft_free() */
                *(t = &(q->v.t)) = (struct huft *)NULL;
                u[h] = ++q;             /* table starts after link */

                /* connect to last table, if there is one */
                if (h)
                {
                    x[h] = i;           /* save pattern for backing up */
                    r.b = (uch)l[h-1];  /* bits to dump before this table */
                    r.e = (uch)(32 + j);    /* bits in this table */
                    r.v.t = q;          /* pointer to this table */
                    j = (i & ((1 << w) - 1)) >> (w - l[h-1]);
                    u[h-1][j] = r;      /* connect to last table */
                }
            }

            /* set up table entry in r */
            r.b = (uch)(k - w);
            if (p >= v + n)
                r.e = INVALID_CODE;     /* out of values--invalid code */
            else if (*p < s)
            {
                r.e = (uch)(*p < 256 ? 32 : 31);  /* 256 is end-of-block code */
                r.v.n = (ush)*p++;      /* simple code is just the value */
            }
            else
            {
                r.e = e[*p - s];        /* non-simple--look up in lists */
                r.v.n = d[*p++ - s];
            }

            /* fill code-like entries with r */
            f = 1 << (k - w);
            for (j = i >> w; j < z; j += f)
                q[j] = r;

            /* backwards increment the k-bit code i */
            for (j = 1 << (k - 1); i & j; j >>= 1)
                i ^= j;
            i ^= j;
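            /* (e.g. for k == 3 this steps i through 000, 100, 010, 110,
               001, ...: ordinary counting with the bit order reversed,
               matching the order in which Huffman codes are assigned) */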

            /* backup over finished tables */
            while ((i & ((1 << w) - 1)) != x[h])
                w -= l[--h];            /* don't need to update q */
        }
    }

    /* return actual size of base table */
    *m = l[0];

    /* Return true (1) if we were given an incomplete table */
    return y != 0 && g != 1;
}
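

/* Usage sketch, illustrative only (it mirrors the fixed literal table
   built in inflate_fixed() above): build a decode table from 288 code
   lengths, then release it with huft_free(). */
#if 0
    unsigned l[288];    /* code lengths, filled in as in inflate_fixed() */
    struct huft *tl;    /* resulting table */
    int bl = 7;         /* requested lookup bits; returns actual on exit */

    if (huft_build(__G__ l, 288, 257, cplens, cplext, &tl, &bl) == 0)
        huft_free(tl);  /* 0 means a complete code set was built */
#endif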



int huft_free(t)
     struct huft *t;        /* table to free */
/* Free the malloc'ed tables built by huft_build(), which makes a linked
   list of the tables it made, with the links in a dummy first entry of
   each table. */
{
    register struct huft *p, *q;

    /* Go through linked list, freeing from the malloced (t[-1]) address. */
    p = t;
    while (p != (struct huft *)NULL)
    {
        q = (--p)->v.t;
        free((zvoid *)p);
        p = q;
    }
    return 0;
}