/* vi: set sw=4 ts=4: */
/*
 * Small lzma deflate implementation.
 * Copyright (C) 2006  Aurelien Jacobs <aurel@gnuage.org>
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005  Igor Pavlov
 *
 * Licensed under GPLv2 or later, see file LICENSE in this tarball for details.
 */
#include "libbb.h"
#include "unarchive.h"

#if ENABLE_FEATURE_LZMA_FAST
#  define speed_inline ALWAYS_INLINE
#else
#  define speed_inline
#endif
typedef struct {
	int fd;
	uint8_t *ptr;

	/* Was keeping rc on stack in unlzma and separately allocating buffer,
	 * but with "buffer 'attached to' allocated rc" code is smaller: */
	/* uint8_t *buffer; */
#define RC_BUFFER ((uint8_t*)(rc+1))

	uint8_t *buffer_end;

	/* Had provisions for variable buffer, but we don't need it here */
	/* int buffer_size; */
#define RC_BUFFER_SIZE 0x10000

	uint32_t code;
	uint32_t range;
	uint32_t bound;
} rc_t;
#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
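/* Each probability is an 11-bit fixed-point estimate (0..1<<RC_MODEL_TOTAL_BITS)
 * stored in a uint16_t. It is adapted after every decoded bit by moving
 * 1/(1<<RC_MOVE_BITS) of the remaining distance toward 0 or the maximum.
 * The range is renormalized a byte at a time once it drops below
 * 1<<RC_TOP_BITS, so at least 24 significant bits are always available. */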
/* Called twice: once at startup and once in rc_normalize() */
static void rc_read(rc_t *rc)
{
	int buffer_size = safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
	if (buffer_size <= 0)
		bb_error_msg_and_die("unexpected EOF");
	rc->ptr = RC_BUFFER;
	rc->buffer_end = RC_BUFFER + buffer_size;
}
/* Called once */
static rc_t* rc_init(int fd) /*, int buffer_size) */
{
	int i;
	rc_t *rc;

	rc = xmalloc(sizeof(*rc) + RC_BUFFER_SIZE);

	rc->fd = fd;
	/* rc->buffer_size = buffer_size; */
	rc->buffer_end = RC_BUFFER + RC_BUFFER_SIZE;
	rc->ptr = rc->buffer_end;

	rc->code = 0;
	rc->range = 0xFFFFFFFF;
	for (i = 0; i < 5; i++) {
		if (rc->ptr >= rc->buffer_end)
			rc_read(rc);
		rc->code = (rc->code << 8) | *rc->ptr++;
	}
	return rc;
}
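/* Note: an LZMA range-coded stream starts with a zero pad byte, so the
 * 5-byte priming loop above leaves the 4 meaningful bytes in rc->code. */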
/* Called once */
static ALWAYS_INLINE void rc_free(rc_t *rc)
{
	free(rc);
}
/* Called twice, but one callsite is in speed_inline'd rc_is_bit_0_helper() */
static void rc_do_normalize(rc_t *rc)
{
	if (rc->ptr >= rc->buffer_end)
		rc_read(rc);
	rc->range <<= 8;
	rc->code = (rc->code << 8) | *rc->ptr++;
}
static ALWAYS_INLINE void rc_normalize(rc_t *rc)
{
	if (rc->range < (1 << RC_TOP_BITS)) {
		rc_do_normalize(rc);
	}
}
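/* Normalization maintains the invariant range >= 1<<RC_TOP_BITS, so the
 * (range >> RC_MODEL_TOTAL_BITS) scaling below never collapses to zero. */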
/* rc_is_bit_0 is called 9 times */
/* Why rc_is_bit_0_helper exists?
 * Because we want to always expose (rc->code < rc->bound) to optimizer.
 * Thus rc_is_bit_0 is always inlined, and rc_is_bit_0_helper is inlined
 * only if we compile for speed.
 */
static speed_inline uint32_t rc_is_bit_0_helper(rc_t *rc, uint16_t *p)
{
	rc_normalize(rc);
	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
	return rc->bound;
}
static ALWAYS_INLINE int rc_is_bit_0(rc_t *rc, uint16_t *p)
{
	uint32_t t = rc_is_bit_0_helper(rc, p);
	return rc->code < t;
}
/* Called ~10 times, but very small, thus inlined */
static speed_inline void rc_update_bit_0(rc_t *rc, uint16_t *p)
{
	rc->range = rc->bound;
	*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
}
static speed_inline void rc_update_bit_1(rc_t *rc, uint16_t *p)
{
	rc->range -= rc->bound;
	rc->code -= rc->bound;
	*p -= *p >> RC_MOVE_BITS;
}
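/* The two updates above form an exponential moving average. *p estimates the
 * probability of a 0-bit: e.g. from the initial p = 1024 (1/2), decoding a 0
 * gives p += (2048 - 1024) >> 5 = 1056, while decoding a 1 gives
 * p -= 1024 >> 5 = 992. */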
/* Called 4 times in unlzma loop */
static int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
{
	if (rc_is_bit_0(rc, p)) {
		rc_update_bit_0(rc, p);
		*symbol *= 2;
		return 0;
	} else {
		rc_update_bit_1(rc, p);
		*symbol = *symbol * 2 + 1;
		return 1;
	}
}
/* Called once */
static ALWAYS_INLINE int rc_direct_bit(rc_t *rc)
{
	rc_normalize(rc);
	rc->range >>= 1;
	if (rc->code >= rc->range) {
		rc->code -= rc->range;
		return 1;
	}
	return 0;
}
/* Called twice */
static speed_inline void
rc_bit_tree_decode(rc_t *rc, uint16_t *p, int num_levels, int *symbol)
{
	int i = num_levels;

	*symbol = 1;
	while (i--)
		rc_get_bit(rc, p + *symbol, symbol);
	*symbol -= 1 << num_levels;
}
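/* *symbol doubles as the index into an implicit binary tree of probabilities:
 * it starts at 1 (the root) and each decoded bit walks one level down.
 * Subtracting 1 << num_levels at the end strips the leading 1-bit, leaving
 * the plain num_levels-bit value. */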
typedef struct {
	uint8_t pos;
	uint32_t dict_size;
	uint64_t dst_size;
} __attribute__ ((packed)) lzma_header_t;
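/* The 13-byte .lzma file header: one properties byte pos = (pb*5 + lp)*9 + lc,
 * a little-endian 32-bit dictionary size, and a little-endian 64-bit
 * uncompressed size. */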
/* #defines will force compiler to compute/optimize each one with each usage.
 * Have heart and use enum instead. */
enum {
	LZMA_BASE_SIZE = 1846,
	LZMA_LIT_SIZE  = 768,

	LZMA_NUM_POS_BITS_MAX = 4,

	LZMA_LEN_NUM_LOW_BITS  = 3,
	LZMA_LEN_NUM_MID_BITS  = 3,
	LZMA_LEN_NUM_HIGH_BITS = 8,

	LZMA_LEN_CHOICE     = 0,
	LZMA_LEN_CHOICE_2   = (LZMA_LEN_CHOICE + 1),
	LZMA_LEN_LOW        = (LZMA_LEN_CHOICE_2 + 1),
	LZMA_LEN_MID        = (LZMA_LEN_LOW
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS))),
	LZMA_LEN_HIGH       = (LZMA_LEN_MID
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS))),
	LZMA_NUM_LEN_PROBS  = (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS)),

	LZMA_NUM_STATES     = 12,
	LZMA_NUM_LIT_STATES = 7,

	LZMA_START_POS_MODEL_INDEX = 4,
	LZMA_END_POS_MODEL_INDEX   = 14,
	LZMA_NUM_FULL_DISTANCES    = (1 << (LZMA_END_POS_MODEL_INDEX >> 1)),

	LZMA_NUM_POS_SLOT_BITS     = 6,
	LZMA_NUM_LEN_TO_POS_STATES = 4,

	LZMA_NUM_ALIGN_BITS = 4,

	LZMA_MATCH_MIN_LEN = 2,

	LZMA_IS_MATCH      = 0,
	LZMA_IS_REP        = (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_IS_REP_G0     = (LZMA_IS_REP + LZMA_NUM_STATES),
	LZMA_IS_REP_G1     = (LZMA_IS_REP_G0 + LZMA_NUM_STATES),
	LZMA_IS_REP_G2     = (LZMA_IS_REP_G1 + LZMA_NUM_STATES),
	LZMA_IS_REP_0_LONG = (LZMA_IS_REP_G2 + LZMA_NUM_STATES),
	LZMA_POS_SLOT      = (LZMA_IS_REP_0_LONG
	                     + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_SPEC_POS      = (LZMA_POS_SLOT
	                     + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS)),
	LZMA_ALIGN         = (LZMA_SPEC_POS
	                     + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX),
	LZMA_LEN_CODER     = (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS)),
	LZMA_REP_LEN_CODER = (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS),
	LZMA_LITERAL       = (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS),
};
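/* All of the above are offsets into one flat uint16_t probability array.
 * LZMA_LITERAL works out to 1846 == LZMA_BASE_SIZE; the literal coders
 * follow it, LZMA_LIT_SIZE probabilities per literal context. */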
USE_DESKTOP(long long) int FAST_FUNC
unpack_lzma_stream(int src_fd, int dst_fd)
{
	USE_DESKTOP(long long total_written = 0;)
	lzma_header_t header;
	int lc, pb, lp;
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint32_t pos;
	uint16_t *p;
	uint16_t *prob;
	uint16_t *prob_lit;
	int num_bits;
	int num_probs;
	rc_t *rc;
	int i, mi;
	uint8_t *buffer;
	uint8_t previous_byte = 0;
	size_t buffer_pos = 0, global_pos = 0;
	int len = 0;
	int state = 0;
	uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1;
	xread(src_fd, &header, sizeof(header));

	if (header.pos >= (9 * 5 * 5))
		bb_error_msg_and_die("bad header");
	mi = header.pos / 9;
	lc = header.pos % 9;
	pb = mi / 5;
	lp = mi % 5;
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;
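	/* Example: the common properties byte 0x5D (93) decodes as
	 * lc = 93 % 9 = 3, lp = (93/9) % 5 = 0, pb = (93/9) / 5 = 2. */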
	header.dict_size = SWAP_LE32(header.dict_size);
	header.dst_size = SWAP_LE64(header.dst_size);

	if (header.dict_size == 0)
		header.dict_size = 1;

	buffer = xmalloc(MIN(header.dst_size, header.dict_size));
	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
	p = xmalloc(num_probs * sizeof(*p));
	num_probs = LZMA_LITERAL + (LZMA_LIT_SIZE << (lc + lp));
	for (i = 0; i < num_probs; i++)
		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;

	rc = rc_init(src_fd); /*, RC_BUFFER_SIZE); */
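	/* Every probability starts at (1 << 11) >> 1 = 1024: bits 0 and 1 are
	 * initially considered equally likely in every context. */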
	while (global_pos + buffer_pos < header.dst_size) {
		int pos_state = (buffer_pos + global_pos) & pos_state_mask;

		prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;
		if (rc_is_bit_0(rc, prob)) {
			mi = 1;
			rc_update_bit_0(rc, prob);
			prob = (p + LZMA_LITERAL
			        + (LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
			                            + (previous_byte >> (8 - lc))
			                           )
			          )
			);
			if (state >= LZMA_NUM_LIT_STATES) {
				int match_byte;

				pos = buffer_pos - rep0;
				while (pos >= header.dict_size)
					pos += header.dict_size;
				match_byte = buffer[pos];
				do {
					int bit;

					match_byte <<= 1;
					bit = match_byte & 0x100;
					prob_lit = prob + 0x100 + bit + mi;
					bit ^= (rc_get_bit(rc, prob_lit, &mi) << 8); /* 0x100 or 0 */
					if (bit)
						break;
				} while (mi < 0x100);
			}
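			/* A "matched" literal (decoded right after a match) uses the byte
			 * at the last match distance as extra context: while the decoded
			 * bits agree with that byte's bits, a separate probability set is
			 * used; on the first mismatch decoding falls through to the plain
			 * literal coder below. */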
			while (mi < 0x100) {
				prob_lit = prob + mi;
				rc_get_bit(rc, prob_lit, &mi);
			}

			/* Decoded a literal: advance the state machine */
			if (state < 4)
				state = 0;
			else if (state < 10)
				state -= 3;
			else
				state -= 6;

			/* it is & 0xff but sometimes casts are better */
			previous_byte = (uint8_t) mi;
#if ENABLE_FEATURE_LZMA_FAST
 one_byte1:
			buffer[buffer_pos++] = previous_byte;
			if (buffer_pos == header.dict_size) {
				buffer_pos = 0;
				global_pos += header.dict_size;
				if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
					goto bad;
				USE_DESKTOP(total_written += header.dict_size;)
			}
#else
			len = 1;
			goto one_byte2;
#endif
		} else {
			int offset;
			uint16_t *prob_len;
			rc_update_bit_1(rc, prob);
			prob = p + LZMA_IS_REP + state;
			if (rc_is_bit_0(rc, prob)) {
				/* Not a rep: a fresh distance will be decoded below */
				rc_update_bit_0(rc, prob);
				rep3 = rep2;
				rep2 = rep1;
				rep1 = rep0;
				state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
				prob = p + LZMA_LEN_CODER;
			} else {
				rc_update_bit_1(rc, prob);
				prob = p + LZMA_IS_REP_G0 + state;
				if (rc_is_bit_0(rc, prob)) {
					rc_update_bit_0(rc, prob);
					prob = (p + LZMA_IS_REP_0_LONG
					        + (state << LZMA_NUM_POS_BITS_MAX)
					        + pos_state
					);
					if (rc_is_bit_0(rc, prob)) {
						/* "Short rep": copy a single byte from distance rep0 */
						rc_update_bit_0(rc, prob);

						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
#if ENABLE_FEATURE_LZMA_FAST
						pos = buffer_pos - rep0;
						while (pos >= header.dict_size)
							pos += header.dict_size;
						previous_byte = buffer[pos];
						goto one_byte1;
#else
						len = 1;
						goto string;
#endif
					} else {
						rc_update_bit_1(rc, prob);
					}
				} else {
					uint32_t distance;

					rc_update_bit_1(rc, prob);
					prob = p + LZMA_IS_REP_G1 + state;
					if (rc_is_bit_0(rc, prob)) {
						rc_update_bit_0(rc, prob);
						distance = rep1;
					} else {
						rc_update_bit_1(rc, prob);
						prob = p + LZMA_IS_REP_G2 + state;
						if (rc_is_bit_0(rc, prob)) {
							rc_update_bit_0(rc, prob);
							distance = rep2;
						} else {
							rc_update_bit_1(rc, prob);
							distance = rep3;
							rep3 = rep2;
						}
						rep2 = rep1;
					}
					rep1 = rep0;
					rep0 = distance;
				}
				state = state < LZMA_NUM_LIT_STATES ? 8 : 11;
				prob = p + LZMA_REP_LEN_CODER;
			}
			prob_len = prob + LZMA_LEN_CHOICE;
			if (rc_is_bit_0(rc, prob_len)) {
				rc_update_bit_0(rc, prob_len);
				prob_len = (prob + LZMA_LEN_LOW
				            + (pos_state << LZMA_LEN_NUM_LOW_BITS));
				offset = 0;
				num_bits = LZMA_LEN_NUM_LOW_BITS;
			} else {
				rc_update_bit_1(rc, prob_len);
				prob_len = prob + LZMA_LEN_CHOICE_2;
				if (rc_is_bit_0(rc, prob_len)) {
					rc_update_bit_0(rc, prob_len);
					prob_len = (prob + LZMA_LEN_MID
					            + (pos_state << LZMA_LEN_NUM_MID_BITS));
					offset = 1 << LZMA_LEN_NUM_LOW_BITS;
					num_bits = LZMA_LEN_NUM_MID_BITS;
				} else {
					rc_update_bit_1(rc, prob_len);
					prob_len = prob + LZMA_LEN_HIGH;
					offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
					          + (1 << LZMA_LEN_NUM_MID_BITS));
					num_bits = LZMA_LEN_NUM_HIGH_BITS;
				}
			}
			rc_bit_tree_decode(rc, prob_len, num_bits, &len);
			len += offset;
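			/* Lengths are coded in three tiers: a 3-bit "low" tree (len 0..7),
			 * a 3-bit "mid" tree (8..15) and an 8-bit "high" tree (16..271),
			 * with LZMA_MATCH_MIN_LEN added at the end. */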
			if (state < 4) {
				int pos_slot;

				state += LZMA_NUM_LIT_STATES;
				prob = p + LZMA_POS_SLOT +
				       ((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
				         LZMA_NUM_LEN_TO_POS_STATES - 1)
				        << LZMA_NUM_POS_SLOT_BITS);
				rc_bit_tree_decode(rc, prob, LZMA_NUM_POS_SLOT_BITS,
				                   &pos_slot);
				if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
					num_bits = (pos_slot >> 1) - 1;
					rep0 = 2 | (pos_slot & 1);
					if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
						rep0 <<= num_bits;
						prob = p + LZMA_SPEC_POS + rep0 - pos_slot - 1;
					} else {
						num_bits -= LZMA_NUM_ALIGN_BITS;
						while (num_bits--)
							rep0 = (rep0 << 1) | rc_direct_bit(rc);
						prob = p + LZMA_ALIGN;
						rep0 <<= LZMA_NUM_ALIGN_BITS;
						num_bits = LZMA_NUM_ALIGN_BITS;
					}
					i = 1;
					mi = 1;
					while (num_bits--) {
						if (rc_get_bit(rc, prob + mi, &mi))
							rep0 |= i;
						i <<= 1;
					}
				} else
					rep0 = pos_slot;
				/* rep0 == 0xFFFFFFFF is the end-of-stream marker */
				if (++rep0 == 0)
					break;
			}

			len += LZMA_MATCH_MIN_LEN;
 SKIP_FEATURE_LZMA_FAST(string:)
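			/* pos_slot is a 6-bit log-style distance code: slots 0..3 are the
			 * distance itself; larger slots carry (pos_slot >> 1) - 1 extra
			 * bits, the top ones read directly (no probability model) except
			 * for the 4 "aligned" low bits, which use the align tree. */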
			do {
				pos = buffer_pos - rep0;
				while (pos >= header.dict_size)
					pos += header.dict_size;
				previous_byte = buffer[pos];
 SKIP_FEATURE_LZMA_FAST(one_byte2:)
				buffer[buffer_pos++] = previous_byte;
				if (buffer_pos == header.dict_size) {
					buffer_pos = 0;
					global_pos += header.dict_size;
					if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
						goto bad;
					USE_DESKTOP(total_written += header.dict_size;)
				}
				len--;
			} while (len != 0 && buffer_pos < header.dst_size);
		}
	}
	{
		SKIP_DESKTOP(int total_written = 0; /* success */)
		USE_DESKTOP(total_written += buffer_pos;)
		/* Flush whatever remains in the dictionary buffer */
		if (full_write(dst_fd, buffer, buffer_pos) != (ssize_t)buffer_pos) {
 bad:
			total_written = -1; /* failure */
		}
		rc_free(rc);
		return total_written;
	}
}