/* vi: set sw=4 ts=4: */
/*
 * Small LZMA decompressor implementation.
 * Copyright (C) 2006 Aurelien Jacobs <aurel@gnuage.org>
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005 Igor Pavlov
 *
 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
 */
#include <stdint.h>

#include "bb/lib/public.h"

#if 0
# define dbg(...) bb_error_msg(__VA_ARGS__)
#else
# define dbg(...) ((void)0)
#endif
struct bb_archive_unlzma_rc_t {
	int fd;
	uint8_t *ptr;

	/* Was keeping rc on stack in unlzma and separately allocating buffer,
	 * but with "buffer 'attached to' allocated rc" code is smaller: */
	/* uint8_t *buffer; */
#define RC_BUFFER ((uint8_t*)(rc+1))

	uint8_t *buffer_end;

	/* Had provisions for variable buffer, but we don't need it here */
	/* int buffer_size; */
#define RC_BUFFER_SIZE 0x10000

	uint32_t code;
	uint32_t range;
	uint32_t bound;
};

#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
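
/* Meaning of the tuning constants above:
 * RC_TOP_BITS: normalization threshold - whenever 'range' drops below
 *   2^24, one more input byte is shifted into 'code' (rc_do_normalize).
 * RC_MODEL_TOTAL_BITS: probabilities are 11-bit fixed-point values,
 *   i.e. 0..2047 out of 2048.
 * RC_MOVE_BITS: adaptation speed - each decoded bit moves the probability
 *   1/32 of the way towards 0 or towards 2048. */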

/* Called once in rc_do_normalize() */
BB_STATIC void bb_archive_unlzma_rc_read(struct bb_archive_unlzma_rc_t *rc)
{
	int buffer_size = bb_safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
//TODO: return -1 instead
//This will make unlzma delete broken unpacked file on unpack errors
	if (buffer_size <= 0)
		bb_simple_error_msg_and_die("unexpected EOF");
	rc->buffer_end = RC_BUFFER + buffer_size;
	rc->ptr = RC_BUFFER;
}

/* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
BB_STATIC void bb_archive_unlzma_rc_do_normalize(struct bb_archive_unlzma_rc_t *rc)
{
	if (rc->ptr >= rc->buffer_end)
		bb_archive_unlzma_rc_read(rc);
	rc->range <<= 8;
	rc->code = (rc->code << 8) | *rc->ptr++;
}

BB_STATIC void bb_archive_unlzma_rc_normalize(struct bb_archive_unlzma_rc_t *rc)
{
	if (rc->range < (1 << RC_TOP_BITS)) {
		bb_archive_unlzma_rc_do_normalize(rc);
	}
}

/* Called once */
BB_STATIC struct bb_archive_unlzma_rc_t* bb_archive_unlzma_rc_init(int fd) /*, int buffer_size) */
{
	int i;
	struct bb_archive_unlzma_rc_t *rc;

	rc = bb_xzalloc(sizeof(*rc) + RC_BUFFER_SIZE);

	rc->fd = fd;
	/* rc->ptr = rc->buffer_end; */

	for (i = 0; i < 5; i++) {
		bb_archive_unlzma_rc_do_normalize(rc);
	}
	rc->range = 0xffffffff;
	return rc;
}

/* Called once */
BB_STATIC void bb_archive_unlzma_rc_free(struct bb_archive_unlzma_rc_t *rc)
{
	free(rc);
}

/* rc_is_bit_1 is called 9 times */
BB_STATIC int bb_archive_unlzma_rc_is_bit_1(struct bb_archive_unlzma_rc_t *rc,
		uint16_t *p)
{
	bb_archive_unlzma_rc_normalize(rc);
	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
	if (rc->code < rc->bound) {
		rc->range = rc->bound;
		*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
		return 0;
	}
	rc->range -= rc->bound;
	rc->code -= rc->bound;
	*p -= *p >> RC_MOVE_BITS;
	return 1;
}
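
/* In rc_is_bit_1() above, 'bound' splits the current range in proportion to
 * the 11-bit probability *p: a code below the split decodes a 0 bit and *p
 * is nudged towards 2048, a code at or above it decodes a 1 bit and *p is
 * nudged towards 0.  This is the primitive every other decode step builds on. */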

/* Called 4 times in unlzma loop */
BB_STATIC int bb_archive_unlzma_rc_get_bit(struct bb_archive_unlzma_rc_t *rc,
		uint16_t *p, int *symbol)
{
	int ret = bb_archive_unlzma_rc_is_bit_1(rc, p);
	*symbol = *symbol * 2 + ret;
	return ret;
}

/* Called once */
BB_STATIC int bb_archive_unlzma_rc_direct_bit(struct bb_archive_unlzma_rc_t *rc)
{
	bb_archive_unlzma_rc_normalize(rc);
	rc->range >>= 1;
	if (rc->code >= rc->range) {
		rc->code -= rc->range;
		return 1;
	}
	return 0;
}
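
/* rc_direct_bit() decodes a bit with a fixed 50/50 split and no adaptive
 * probability; it is used below for the middle bits of large match distances. */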

/* Called twice */
BB_STATIC void bb_archive_unlzma_rc_bit_tree_decode(struct bb_archive_unlzma_rc_t *rc, uint16_t *p,
		int num_levels, int *symbol)
{
	int i = num_levels;

	*symbol = 1;
	while (i--)
		bb_archive_unlzma_rc_get_bit(rc, p + *symbol, symbol);
	*symbol -= 1 << num_levels;
}
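
/* Bit-tree decode: walk a binary tree of num_levels adaptive probabilities.
 * *symbol starts at 1 and collects the decoded bits as the tree index;
 * subtracting (1 << num_levels) at the end strips that leading 1 again. */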

struct bb_archive_unlzma_lzma_header_t {
	uint8_t pos_u8;
	uint8_t dict_size_u32[4];
	uint8_t dst_size_u64[8];
}; /* PACKED */
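
/* Raw 13-byte .lzma file header: one properties byte (lc/lp/pb packed),
 * a little-endian 32-bit dictionary size, and a little-endian 64-bit
 * uncompressed size (all-ones means "unknown, decode until end marker"). */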

/* #defines will force compiler to compute/optimize each one with each usage.
 * Have heart and use enum instead. */
enum {
	BB_ARCHIVE_UNLZMA_LZMA_BASE_SIZE = 1846,
	BB_ARCHIVE_UNLZMA_LZMA_LIT_SIZE = 768,

	BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX = 4,

	BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS = 3,
	BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS = 3,
	BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_HIGH_BITS = 8,

	BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE = 0,
	BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2 = (BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE + 1),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_LOW = (BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2 + 1),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_MID = (BB_ARCHIVE_UNLZMA_LZMA_LEN_LOW
			+ (1 << (BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX
				+ BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS))),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_HIGH = (BB_ARCHIVE_UNLZMA_LZMA_LEN_MID
			+ (1 << (BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX
				+ BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS))),
	BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_PROBS = (BB_ARCHIVE_UNLZMA_LZMA_LEN_HIGH
			+ (1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_HIGH_BITS)),

	BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES = 12,
	BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES = 7,

	BB_ARCHIVE_UNLZMA_LZMA_START_POS_MODEL_INDEX = 4,
	BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX = 14,
	BB_ARCHIVE_UNLZMA_LZMA_NUM_FULL_DISTANCES = (1
			<< (BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX >> 1)),

	BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS = 6,
	BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES = 4,

	BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS = 4,

	BB_ARCHIVE_UNLZMA_LZMA_MATCH_MIN_LEN = 2,

	BB_ARCHIVE_UNLZMA_LZMA_IS_MATCH = 0,
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP = (BB_ARCHIVE_UNLZMA_LZMA_IS_MATCH
			+ (BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES << BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX)),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0 = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1 = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G2 = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_0_LONG = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G2
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_POS_SLOT = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_0_LONG
			+ (BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES << BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX)),
	BB_ARCHIVE_UNLZMA_LZMA_SPEC_POS = (BB_ARCHIVE_UNLZMA_LZMA_POS_SLOT
			+ (BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES
				<< BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS)),
	BB_ARCHIVE_UNLZMA_LZMA_ALIGN = (BB_ARCHIVE_UNLZMA_LZMA_SPEC_POS
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_FULL_DISTANCES
			- BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_CODER = (BB_ARCHIVE_UNLZMA_LZMA_ALIGN
			+ (1 << BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS)),
	BB_ARCHIVE_UNLZMA_LZMA_REP_LEN_CODER = (BB_ARCHIVE_UNLZMA_LZMA_LEN_CODER
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_PROBS),
	BB_ARCHIVE_UNLZMA_LZMA_LITERAL = (BB_ARCHIVE_UNLZMA_LZMA_REP_LEN_CODER
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_PROBS),
};
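
/* The enum values above are offsets into the single array of 11-bit
 * probabilities (p[] in bb_archive_unpack_lzma_stream() below).  The literal
 * coders start at ..._LZMA_LITERAL and their count depends on lc and lp,
 * which is why p[] is allocated with
 * BASE_SIZE + (LIT_SIZE << (lc + lp)) entries. */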

BB_STATIC long bb_archive_unpack_lzma_stream(bb_archive_transformer_state_t *xstate)
{
	long total_written = 0;
	struct bb_archive_unlzma_lzma_header_t header;
	int lc, pb, lp;
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint16_t *p;
	struct bb_archive_unlzma_rc_t *rc;
	int i;
	uint8_t *buffer;
	uint32_t buffer_size;
	uint8_t previous_byte = 0;
	size_t buffer_pos = 0, global_pos = 0;
	int len = 0;
	int state = 0;
	uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1;

	if (bb_full_read(xstate->src_fd, &header, sizeof(header)) != sizeof(header)
	 || header.pos_u8 >= (9 * 5 * 5)
	) {
		bb_simple_error_msg("bad lzma header");
		return -1;
	}

	i = header.pos_u8 / 9;
	lc = header.pos_u8 % 9;
	pb = i / 5;
	lp = i % 5;
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;
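	/* The single properties byte packs lc/lp/pb as ((pb * 5 + lp) * 9 + lc),
	 * which is why anything >= 9*5*5 was rejected above.  The common default
	 * "-lc3 -lp0 -pb2" encodes as 0x5D.  pos_state_mask and literal_pos_mask
	 * select the low bits of the output position used as coding context. */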

	/* Example values from linux-3.3.4.tar.lzma:
	 * dict_size: 64M, dst_size: 2^64-1
	 */
	*(uint32_t*)(header.dict_size_u32) = *(uint32_t*)(header.dict_size_u32);
	*(uint64_t*)(header.dst_size_u64) = *(uint64_t*)(header.dst_size_u64);

	if (*(uint32_t*)(header.dict_size_u32) == 0)
		(*(uint32_t*)(header.dict_size_u32))++;

	buffer_size = BB_MIN(*(uint64_t*)(header.dst_size_u64), *(uint32_t*)(header.dict_size_u32));
	buffer = bb_xmalloc(buffer_size);

	{
		int num_probs;

		num_probs = BB_ARCHIVE_UNLZMA_LZMA_BASE_SIZE
			+ (BB_ARCHIVE_UNLZMA_LZMA_LIT_SIZE << (lc + lp));
		p = bb_xmalloc(num_probs * sizeof(*p));
		num_probs += BB_ARCHIVE_UNLZMA_LZMA_LITERAL - BB_ARCHIVE_UNLZMA_LZMA_BASE_SIZE;
		for (i = 0; i < num_probs; i++)
			p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;
	}

	rc = bb_archive_unlzma_rc_init(xstate->src_fd); /*, RC_BUFFER_SIZE); */
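	/* Every probability starts at 1024/2048 = 0.5.  bb_archive_unlzma_rc_init()
	 * pre-loads five stream bytes into the 32-bit 'code'; the very first byte
	 * is shifted out again (it is 0 in any stream produced by the reference
	 * encoder). */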

	while (global_pos + buffer_pos < (*(uint64_t*)(header.dst_size_u64))) {
		int pos_state = (buffer_pos + global_pos) & pos_state_mask;
		uint16_t *prob = p + BB_ARCHIVE_UNLZMA_LZMA_IS_MATCH
			+ (state << BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX) + pos_state;

		if (!bb_archive_unlzma_rc_is_bit_1(rc, prob)) {
			static const char next_state[BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES] =
				{ 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5 };
			int mi = 1;

			prob = (p + BB_ARCHIVE_UNLZMA_LZMA_LITERAL
				+ (BB_ARCHIVE_UNLZMA_LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
					+ (previous_byte >> (8 - lc))
				))
			);

			if (state >= BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES) {
				int match_byte;
				uint32_t pos;

				pos = buffer_pos - rep0;
				if ((int32_t)pos < 0) {
					pos += *(uint32_t*)(header.dict_size_u32);
					if ((int32_t)pos < 0)
						goto bad;
				}
				match_byte = buffer[pos];
				do {
					int bit;

					match_byte <<= 1;
					bit = match_byte & 0x100;
					bit ^= (bb_archive_unlzma_rc_get_bit(rc, prob + 0x100 + bit + mi, &mi) << 8); /* 0x100 or 0 */
					if (bit)
						break;
				} while (mi < 0x100);
			}
			while (mi < 0x100) {
				bb_archive_unlzma_rc_get_bit(rc, prob + mi, &mi);
			}

			state = next_state[state];

			previous_byte = (uint8_t) mi;
			len = 1;
			goto one_byte2;
		} else {
			int num_bits;
			int offset;
			uint16_t *prob2;
#define prob_len prob2

			prob2 = p + BB_ARCHIVE_UNLZMA_LZMA_IS_REP + state;
			if (!bb_archive_unlzma_rc_is_bit_1(rc, prob2)) {
				rep3 = rep2;
				rep2 = rep1;
				rep1 = rep0;
				state = state < BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES ? 0 : 3;
				prob2 = p + BB_ARCHIVE_UNLZMA_LZMA_LEN_CODER;
			} else {
				prob2 += BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0 - BB_ARCHIVE_UNLZMA_LZMA_IS_REP;
				if (!bb_archive_unlzma_rc_is_bit_1(rc, prob2)) {
					prob2 = (p + BB_ARCHIVE_UNLZMA_LZMA_IS_REP_0_LONG
						+ (state << BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX)
						+ pos_state
					);
					if (!bb_archive_unlzma_rc_is_bit_1(rc, prob2)) {
						state = state < BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES ? 9 : 11;
						len = 1;
						goto string;
					}
				} else {
					uint32_t distance;

					prob2 += BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1 - BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0;
					distance = rep1;
					if (bb_archive_unlzma_rc_is_bit_1(rc, prob2)) {
						prob2 += BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G2 - BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1;
						distance = rep2;
						if (bb_archive_unlzma_rc_is_bit_1(rc, prob2)) {
							distance = rep3;
							rep3 = rep2;
						}
						rep2 = rep1;
					}
					rep1 = rep0;
					rep0 = distance;
				}
				state = state < BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES ? 8 : 11;
				prob2 = p + BB_ARCHIVE_UNLZMA_LZMA_REP_LEN_CODER;
			}

			prob_len = prob2 + BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE;
			num_bits = BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS;
			if (!bb_archive_unlzma_rc_is_bit_1(rc, prob_len)) {
				prob_len += BB_ARCHIVE_UNLZMA_LZMA_LEN_LOW - BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE
					+ (pos_state << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS);
				offset = 0;
			} else {
				prob_len += BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2 - BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE;
				if (!bb_archive_unlzma_rc_is_bit_1(rc, prob_len)) {
					prob_len += BB_ARCHIVE_UNLZMA_LZMA_LEN_MID - BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2
						+ (pos_state << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS);
					offset = 1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS;
					num_bits += BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS - BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS;
				} else {
					prob_len += BB_ARCHIVE_UNLZMA_LZMA_LEN_HIGH - BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2;
					offset = ((1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS)
						+ (1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS));
					num_bits += BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_HIGH_BITS - BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS;
				}
			}
			bb_archive_unlzma_rc_bit_tree_decode(rc, prob_len, num_bits, &len);
			len += offset;
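			/* 'len' is now 0..271: 0..7 from the low coder, 8..15 from the
			 * mid coder, or 16..271 from the high coder, selected by the two
			 * choice bits above.  ..._LZMA_MATCH_MIN_LEN (2) is added further
			 * down, so real match lengths are 2..273. */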

			if (state < 4) {
				int pos_slot;
				uint16_t *prob3;

				state += BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES;
				prob3 = p + BB_ARCHIVE_UNLZMA_LZMA_POS_SLOT +
					((len < BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES ? len :
					  BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES - 1)
						<< BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS);
				bb_archive_unlzma_rc_bit_tree_decode(rc, prob3,
						BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS, &pos_slot);
				rep0 = pos_slot;
				if (pos_slot >= BB_ARCHIVE_UNLZMA_LZMA_START_POS_MODEL_INDEX) {
					int i2, mi2, num_bits2 = (pos_slot >> 1) - 1;
					rep0 = 2 | (pos_slot & 1);
					if (pos_slot < BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX) {
						rep0 <<= num_bits2;
						prob3 = p + BB_ARCHIVE_UNLZMA_LZMA_SPEC_POS + rep0 - pos_slot - 1;
					} else {
						for (; num_bits2 != BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS; num_bits2--)
							rep0 = (rep0 << 1) | bb_archive_unlzma_rc_direct_bit(rc);
						rep0 <<= BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS;
						// Note: (int32_t)rep0 may be < 0 here
						// (I have linux-3.3.4.tar.lzma which has it).
						// I moved the check after "++rep0 == 0" check below.
						prob3 = p + BB_ARCHIVE_UNLZMA_LZMA_ALIGN;
					}
					i2 = 1;
					mi2 = 1;
					while (num_bits2--) {
						if (bb_archive_unlzma_rc_get_bit(rc, prob3 + mi2, &mi2))
							rep0 |= i2;
						i2 <<= 1;
					}
				}
				rep0++;
				if ((int32_t)rep0 <= 0) {
					if (rep0 == 0)
						break;
					dbg("%d rep0:%d", __LINE__, rep0);
					goto bad;
				}
			}

			len += BB_ARCHIVE_UNLZMA_LZMA_MATCH_MIN_LEN;
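			/* At this point rep0 holds (match distance + 1); a decoded
			 * distance of 0xffffffff is the end-of-stream marker, which is
			 * why rep0 wrapping to 0 after the ++ above breaks the loop. */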
			/*
			 * LZMA SDK has this optimized:
			 * it precalculates size and copies many bytes
			 * in a loop with simpler checks, a-la:
			 *	do
			 *		*(dest) = *(dest + ofs);
			 *	while (++dest != lim);
			 * and
			 *	do {
			 *		buffer[buffer_pos++] = buffer[pos];
			 *		if (++pos == header.dict_size)
			 *			pos = 0;
			 *	} while (--cur_len != 0);
			 * Our code is slower (more checks per byte copy):
			 */
 string:
			do {
				uint32_t pos = buffer_pos - rep0;
				if ((int32_t)pos < 0) {
					pos += *(uint32_t*)(header.dict_size_u32);
					/* bug 10436 has an example file where this triggers: */
					//if ((int32_t)pos < 0)
					//	goto bad;
					/* more stringent test (see unzip_bad_lzma_1.zip): */
					if (pos >= buffer_size)
						goto bad;
				}
				previous_byte = buffer[pos];
 one_byte2:
				buffer[buffer_pos++] = previous_byte;
				if (buffer_pos == *(uint32_t*)(header.dict_size_u32)) {
					buffer_pos = 0;
					global_pos += *(uint32_t*)(header.dict_size_u32);
					if (bb_archive_transformer_write(xstate, buffer, *(uint32_t*)(header.dict_size_u32)) != (ssize_t)(*(uint32_t*)(header.dict_size_u32)))
						goto bad;
					total_written += *(uint32_t*)(header.dict_size_u32);
				}
				len--;
			} while (len != 0 && buffer_pos < (*(uint64_t*)(header.dst_size_u64)));
			/* FIXME: ...........^^^^^
			 * shouldn't it be "global_pos + buffer_pos < header.dst_size"?
			 * It probably should, but it is a "do we accidentally
			 * unpack more bytes than expected?" check - which
			 * never happens for well-formed compression data...
			 */
		}
	}

	total_written += buffer_pos;
	if (bb_archive_transformer_write(xstate, buffer, buffer_pos) != (ssize_t)buffer_pos) {
 bad:
		/* One of our users, bbunpack(), expects _us_ to emit
		 * the error message (since it's the best place to give
		 * potentially more detailed information).
		 * Do not fail silently.
		 */
		bb_simple_error_msg("corrupted data");
		total_written = -1; /* failure */
	}
	bb_archive_unlzma_rc_free(rc);
	free(p);
	free(buffer);
	return total_written;
}

/* cleanup */
#undef dbg
#undef RC_BUFFER
#undef RC_BUFFER_SIZE
#undef RC_TOP_BITS
#undef RC_MOVE_BITS
#undef RC_MODEL_TOTAL_BITS
#undef prob_len