1 /* vi: set sw=4 ts=4: */
3 * Small lzma deflate implementation.
4 * Copyright (C) 2006 Aurelien Jacobs <aurel@gnuage.org>
6 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
7 * Copyright (C) 1999-2005 Igor Pavlov
9 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
13 #include "bb/lib/public.h"
16 # define dbg(...) bb_error_msg(__VA_ARGS__)
18 # define dbg(...) ((void)0)
/* Range-decoder state for the LZMA arithmetic coder.
 * The 64 KiB input buffer lives immediately after this struct in the
 * same heap allocation (see RC_BUFFER below). */
struct bb_archive_unlzma_rc_t {
	int fd;              /* compressed input file descriptor */
	uint8_t *ptr;        /* current read position inside the buffer */

	/* Was keeping rc on stack in unlzma and separately allocating buffer,
	 * but with "buffer 'attached to' allocated rc" code is smaller: */
	/* uint8_t *buffer; */
#define RC_BUFFER ((uint8_t*)(rc+1))

	uint8_t *buffer_end; /* one past the last valid byte read into the buffer */

	/* Had provisions for variable buffer, but we don't need it here */
	/* int buffer_size; */
#define RC_BUFFER_SIZE 0x10000

	uint32_t code;       /* current code word shifted in from the stream */
	uint32_t range;      /* current width of the coding interval */
	uint32_t bound;      /* scratch: split point computed in rc_is_bit_1() */
};

#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
46 /* Called once in rc_do_normalize() */
47 BB_STATIC
void bb_archive_unlzma_rc_read(struct bb_archive_unlzma_rc_t
*rc
)
49 int buffer_size
= bb_safe_read(rc
->fd
, RC_BUFFER
, RC_BUFFER_SIZE
);
50 //TODO: return -1 instead
51 //This will make unlzma delete broken unpacked file on unpack errors
53 bb_simple_error_msg_and_die("unexpected EOF");
54 rc
->buffer_end
= RC_BUFFER
+ buffer_size
;
58 /* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
59 BB_STATIC
void bb_archive_unlzma_rc_do_normalize(struct bb_archive_unlzma_rc_t
*rc
)
61 if (rc
->ptr
>= rc
->buffer_end
)
62 bb_archive_unlzma_rc_read(rc
);
64 rc
->code
= (rc
->code
<< 8) | *rc
->ptr
++;
67 BB_STATIC
void bb_archive_unlzma_rc_normalize(struct bb_archive_unlzma_rc_t
*rc
)
69 if (rc
->range
< (1 << RC_TOP_BITS
)) {
70 bb_archive_unlzma_rc_do_normalize(rc
);
75 BB_STATIC
struct bb_archive_unlzma_rc_t
* bb_archive_unlzma_rc_init(int fd
) /*, int buffer_size) */
78 struct bb_archive_unlzma_rc_t
*rc
;
80 rc
= bb_xzalloc(sizeof(*rc
) + RC_BUFFER_SIZE
);
83 /* rc->ptr = rc->buffer_end; */
85 for (i
= 0; i
< 5; i
++) {
86 bb_archive_unlzma_rc_do_normalize(rc
);
88 rc
->range
= 0xffffffff;
93 BB_STATIC
void bb_archive_unlzma_rc_free(struct bb_archive_unlzma_rc_t
*rc
)
98 /* rc_is_bit_1 is called 9 times */
99 BB_STATIC
int bb_archive_unlzma_rc_is_bit_1(struct bb_archive_unlzma_rc_t
*rc
,
102 bb_archive_unlzma_rc_normalize(rc
);
103 rc
->bound
= *p
* (rc
->range
>> RC_MODEL_TOTAL_BITS
);
104 if (rc
->code
< rc
->bound
) {
105 rc
->range
= rc
->bound
;
106 *p
+= ((1 << RC_MODEL_TOTAL_BITS
) - *p
) >> RC_MOVE_BITS
;
109 rc
->range
-= rc
->bound
;
110 rc
->code
-= rc
->bound
;
111 *p
-= *p
>> RC_MOVE_BITS
;
115 /* Called 4 times in unlzma loop */
116 BB_STATIC
int bb_archive_unlzma_rc_get_bit(struct bb_archive_unlzma_rc_t
*rc
,
117 uint16_t *p
, int *symbol
)
119 int ret
= bb_archive_unlzma_rc_is_bit_1(rc
, p
);
120 *symbol
= *symbol
* 2 + ret
;
125 BB_STATIC
int bb_archive_unlzma_rc_direct_bit(struct bb_archive_unlzma_rc_t
*rc
)
127 bb_archive_unlzma_rc_normalize(rc
);
129 if (rc
->code
>= rc
->range
) {
130 rc
->code
-= rc
->range
;
137 BB_STATIC
void bb_archive_unlzma_rc_bit_tree_decode(struct bb_archive_unlzma_rc_t
*rc
, uint16_t *p
,
138 int num_levels
, int *symbol
)
144 bb_archive_unlzma_rc_get_bit(rc
, p
+ *symbol
, symbol
);
145 *symbol
-= 1 << num_levels
;
/* On-disk .lzma file header: 1 properties byte, then little-endian
 * dictionary size (4 bytes) and uncompressed size (8 bytes).
 * Plain uint8_t arrays keep the struct 13 bytes with no packing
 * attribute; the decoder accesses them via (uint32_t*)/(uint64_t*) casts. */
struct bb_archive_unlzma_lzma_header_t {
	uint8_t pos_u8;           /* packs lc/lp/pb: pos = (pb * 5 + lp) * 9 + lc */
	uint8_t dict_size_u32[4]; /* dictionary size, LE32 */
	uint8_t dst_size_u64[8];  /* uncompressed size, LE64 (~0 = unknown) */
};
/* #defines will force compiler to compute/optimize each one with each usage.
 * Have heart and use enum instead. */
enum {
	BB_ARCHIVE_UNLZMA_LZMA_BASE_SIZE = 1846,
	BB_ARCHIVE_UNLZMA_LZMA_LIT_SIZE = 768,

	BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX = 4,

	BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS = 3,
	BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS = 3,
	BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_HIGH_BITS = 8,

	/* Layout of a length coder's probability block: */
	BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE = 0,
	BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2 = (BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE + 1),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_LOW = (BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2 + 1),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_MID = (BB_ARCHIVE_UNLZMA_LZMA_LEN_LOW
			+ (1 << (BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX
				+ BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS))),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_HIGH = (BB_ARCHIVE_UNLZMA_LZMA_LEN_MID
			+ (1 << (BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX
				+ BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS))),
	BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_PROBS = (BB_ARCHIVE_UNLZMA_LZMA_LEN_HIGH
			+ (1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_HIGH_BITS)),

	BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES = 12,
	BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES = 7,

	BB_ARCHIVE_UNLZMA_LZMA_START_POS_MODEL_INDEX = 4,
	BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX = 14,
	BB_ARCHIVE_UNLZMA_LZMA_NUM_FULL_DISTANCES = (1
			<< (BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX >> 1)),

	BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS = 6,
	BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES = 4,

	BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS = 4,

	BB_ARCHIVE_UNLZMA_LZMA_MATCH_MIN_LEN = 2,

	/* Layout of the big probability array p[] (offsets, in uint16_t's): */
	BB_ARCHIVE_UNLZMA_LZMA_IS_MATCH = 0,
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP = (BB_ARCHIVE_UNLZMA_LZMA_IS_MATCH
			+ (BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES
				<< BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX)),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0 = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1 = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G2 = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_IS_REP_0_LONG = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G2
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES),
	BB_ARCHIVE_UNLZMA_LZMA_POS_SLOT = (BB_ARCHIVE_UNLZMA_LZMA_IS_REP_0_LONG
			+ (BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES
				<< BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX)),
	BB_ARCHIVE_UNLZMA_LZMA_SPEC_POS = (BB_ARCHIVE_UNLZMA_LZMA_POS_SLOT
			+ (BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES
				<< BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS)),
	BB_ARCHIVE_UNLZMA_LZMA_ALIGN = (BB_ARCHIVE_UNLZMA_LZMA_SPEC_POS
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_FULL_DISTANCES
			- BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX),
	BB_ARCHIVE_UNLZMA_LZMA_LEN_CODER = (BB_ARCHIVE_UNLZMA_LZMA_ALIGN
			+ (1 << BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS)),
	BB_ARCHIVE_UNLZMA_LZMA_REP_LEN_CODER = (BB_ARCHIVE_UNLZMA_LZMA_LEN_CODER
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_PROBS),
	/* LITERAL must land exactly at BASE_SIZE (1846) - see assert in tests */
	BB_ARCHIVE_UNLZMA_LZMA_LITERAL = (BB_ARCHIVE_UNLZMA_LZMA_REP_LEN_CODER
			+ BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_PROBS),
};
/*
 * Decode a whole .lzma stream from xstate->src_fd and emit the
 * uncompressed bytes through bb_archive_transformer_write().
 * Returns total bytes written, or -1 on corrupt input.
 *
 * NOTE(review): this chunk is a mangled paste - stray original line
 * numbers are embedded in the text and many source lines are missing
 * (local declarations, braces, goto/break error paths, else arms).
 * Code left byte-identical; only comments added. Restore from the
 * upstream decompressor before attempting to build.
 */
222 BB_STATIC
long bb_archive_unpack_lzma_stream(bb_archive_transformer_state_t
*xstate
)
224 long total_written
= 0;
225 struct bb_archive_unlzma_lzma_header_t header
;
227 uint32_t pos_state_mask
;
228 uint32_t literal_pos_mask
;
230 struct bb_archive_unlzma_rc_t
*rc
;
233 uint32_t buffer_size
;
234 uint8_t previous_byte
= 0;
235 size_t buffer_pos
= 0, global_pos
= 0;
/* rep0..rep3: the four most recent match distances (LZMA history) */
238 uint32_t rep0
= 1, rep1
= 1, rep2
= 1, rep3
= 1;
/* Read and validate the 13-byte header; pos_u8 packs lc/lp/pb and
 * must be < 9*5*5 = 225 */
240 if (bb_full_read(xstate
->src_fd
, &header
, sizeof(header
)) != sizeof(header
)
241 || header
.pos_u8
>= (9 * 5 * 5)
243 bb_simple_error_msg("bad lzma header");
/* Unpack the properties byte: lc, then (in missing lines) pb and lp */
247 i
= header
.pos_u8
/ 9;
248 lc
= header
.pos_u8
% 9;
251 pos_state_mask
= (1 << pb
) - 1;
252 literal_pos_mask
= (1 << lp
) - 1;
254 /* Example values from linux-3.3.4.tar.lzma:
255 * dict_size: 64M, dst_size: 2^64-1
/* Endianness fixup of the LE header fields (self-assignment on LE hosts;
 * presumably a SWAP_LE* wrapper was lost in the paste - TODO confirm) */
257 *(uint32_t*)(header
.dict_size_u32
) = *(uint32_t*)(header
.dict_size_u32
);
258 *(uint64_t*)(header
.dst_size_u64
) = *(uint64_t*)(header
.dst_size_u64
);
260 if (*(uint32_t*)(header
.dict_size_u32
) == 0)
261 (*(uint64_t*)(header
.dict_size_u32
))++;
/* Ring buffer sized to the dictionary, capped by total output size */
263 buffer_size
= BB_MIN(*(uint64_t*)(header
.dst_size_u64
), *(uint32_t*)(header
.dict_size_u32
));
264 buffer
= bb_xmalloc(buffer_size
);
/* Allocate all bit-model probabilities and init each to 0.5 */
269 num_probs
= BB_ARCHIVE_UNLZMA_LZMA_BASE_SIZE
270 + (BB_ARCHIVE_UNLZMA_LZMA_LIT_SIZE
<< (lc
+ lp
));
271 p
= bb_xmalloc(num_probs
* sizeof(*p
));
272 num_probs
+= BB_ARCHIVE_UNLZMA_LZMA_LITERAL
- BB_ARCHIVE_UNLZMA_LZMA_BASE_SIZE
;
273 for (i
= 0; i
< num_probs
; i
++)
274 p
[i
] = (1 << RC_MODEL_TOTAL_BITS
) >> 1;
277 rc
= bb_archive_unlzma_rc_init(xstate
->src_fd
); /*, RC_BUFFER_SIZE); */
/* Main decode loop: one literal byte or one match per iteration */
279 while (global_pos
+ buffer_pos
< (*(uint64_t*)(header
.dst_size_u64
))) {
280 int pos_state
= (buffer_pos
+ global_pos
) & pos_state_mask
;
281 uint16_t *prob
= p
+ BB_ARCHIVE_UNLZMA_LZMA_IS_MATCH
282 + (state
<< BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX
) + pos_state
;
/* is_match bit == 0 -> literal byte */
284 if (!bb_archive_unlzma_rc_is_bit_1(rc
, prob
)) {
285 static const char next_state
[BB_ARCHIVE_UNLZMA_LZMA_NUM_STATES
] =
286 { 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5 };
/* Select literal coder slot from position bits and previous byte */
289 prob
= (p
+ BB_ARCHIVE_UNLZMA_LZMA_LITERAL
290 + (BB_ARCHIVE_UNLZMA_LZMA_LIT_SIZE
* ((((buffer_pos
+ global_pos
) & literal_pos_mask
) << lc
)
291 + (previous_byte
>> (8 - lc
))
/* After a match, decode the literal with the matched-byte model */
296 if (state
>= BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES
) {
300 pos
= buffer_pos
- rep0
;
301 if ((int32_t)pos
< 0) {
302 pos
+= *(uint32_t*)(header
.dict_size_u32
);
303 if ((int32_t)pos
< 0)
306 match_byte
= buffer
[pos
];
311 bit
= match_byte
& 0x100;
312 bit
^= (bb_archive_unlzma_rc_get_bit(rc
, prob
+ 0x100 + bit
+ mi
, &mi
) << 8); /* 0x100 or 0 */
315 } while (mi
< 0x100);
/* Plain literal: walk the 8-level bit tree */
318 bb_archive_unlzma_rc_get_bit(rc
, prob
+ mi
, &mi
);
321 state
= next_state
[state
];
323 previous_byte
= (uint8_t) mi
;
/* Match/rep branch: pick a distance-history slot and a length coder */
330 #define prob_len prob2
332 prob2
= p
+ BB_ARCHIVE_UNLZMA_LZMA_IS_REP
+ state
;
333 if (!bb_archive_unlzma_rc_is_bit_1(rc
, prob2
)) {
/* New match: shift distance history, use the main length coder */
337 state
= state
< BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES
? 0 : 3;
338 prob2
= p
+ BB_ARCHIVE_UNLZMA_LZMA_LEN_CODER
;
340 prob2
+= BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0
- BB_ARCHIVE_UNLZMA_LZMA_IS_REP
;
341 if (!bb_archive_unlzma_rc_is_bit_1(rc
, prob2
)) {
342 prob2
= (p
+ BB_ARCHIVE_UNLZMA_LZMA_IS_REP_0_LONG
343 + (state
<< BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_BITS_MAX
)
/* Short rep: copy exactly one byte at distance rep0 */
346 if (!bb_archive_unlzma_rc_is_bit_1(rc
, prob2
)) {
347 state
= state
< BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES
? 9 : 11;
/* rep1/rep2/rep3: rotate the chosen distance to the front */
354 prob2
+= BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1
- BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G0
;
356 if (bb_archive_unlzma_rc_is_bit_1(rc
, prob2
)) {
357 prob2
+= BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G2
- BB_ARCHIVE_UNLZMA_LZMA_IS_REP_G1
;
359 if (bb_archive_unlzma_rc_is_bit_1(rc
, prob2
)) {
368 state
= state
< BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES
? 8 : 11;
369 prob2
= p
+ BB_ARCHIVE_UNLZMA_LZMA_REP_LEN_CODER
;
/* Decode match length via the low/mid/high length trees */
372 prob_len
= prob2
+ BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE
;
373 num_bits
= BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS
;
374 if (!bb_archive_unlzma_rc_is_bit_1(rc
, prob_len
)) {
375 prob_len
+= BB_ARCHIVE_UNLZMA_LZMA_LEN_LOW
- BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE
376 + (pos_state
<< BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS
);
379 prob_len
+= BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2
- BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE
;
380 if (!bb_archive_unlzma_rc_is_bit_1(rc
, prob_len
)) {
381 prob_len
+= BB_ARCHIVE_UNLZMA_LZMA_LEN_MID
- BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2
382 + (pos_state
<< BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS
);
383 offset
= 1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS
;
384 num_bits
+= BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS
- BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS
;
386 prob_len
+= BB_ARCHIVE_UNLZMA_LZMA_LEN_HIGH
- BB_ARCHIVE_UNLZMA_LZMA_LEN_CHOICE_2
;
387 offset
= ((1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS
)
388 + (1 << BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_MID_BITS
));
389 num_bits
+= BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_HIGH_BITS
- BB_ARCHIVE_UNLZMA_LZMA_LEN_NUM_LOW_BITS
;
392 bb_archive_unlzma_rc_bit_tree_decode(rc
, prob_len
, num_bits
, &len
);
/* New match only: decode pos_slot, then the distance rep0 */
399 state
+= BB_ARCHIVE_UNLZMA_LZMA_NUM_LIT_STATES
;
400 prob3
= p
+ BB_ARCHIVE_UNLZMA_LZMA_POS_SLOT
+
401 ((len
< BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES
? len
:
402 BB_ARCHIVE_UNLZMA_LZMA_NUM_LEN_TO_POS_STATES
- 1)
403 << BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS
);
404 bb_archive_unlzma_rc_bit_tree_decode(rc
, prob3
,
405 BB_ARCHIVE_UNLZMA_LZMA_NUM_POS_SLOT_BITS
, &pos_slot
);
407 if (pos_slot
>= BB_ARCHIVE_UNLZMA_LZMA_START_POS_MODEL_INDEX
) {
408 int i2
, mi2
, num_bits2
= (pos_slot
>> 1) - 1;
409 rep0
= 2 | (pos_slot
& 1);
410 if (pos_slot
< BB_ARCHIVE_UNLZMA_LZMA_END_POS_MODEL_INDEX
) {
/* Mid-range distance: remaining bits come from SPEC_POS models */
412 prob3
= p
+ BB_ARCHIVE_UNLZMA_LZMA_SPEC_POS
+ rep0
- pos_slot
- 1;
/* Large distance: direct bits, then 4 ALIGN model bits */
414 for (; num_bits2
!= BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS
; num_bits2
--)
415 rep0
= (rep0
<< 1) | bb_archive_unlzma_rc_direct_bit(rc
);
416 rep0
<<= BB_ARCHIVE_UNLZMA_LZMA_NUM_ALIGN_BITS
;
417 // Note: (int32_t)rep0 may be < 0 here
418 // (I have linux-3.3.4.tar.lzma which has it).
419 // I moved the check after "++rep0 == 0" check below.
420 prob3
= p
+ BB_ARCHIVE_UNLZMA_LZMA_ALIGN
;
/* Reverse bit-tree: low bits of the distance */
424 while (num_bits2
--) {
425 if (bb_archive_unlzma_rc_get_bit(rc
, prob3
+ mi2
, &mi2
))
/* rep0 == 0 after increment marks end-of-stream; negative is corrupt */
431 if ((int32_t)rep0
<= 0) {
434 dbg("%d rep0:%d", __LINE__
, rep0
);
439 len
+= BB_ARCHIVE_UNLZMA_LZMA_MATCH_MIN_LEN
;
441 * LZMA SDK has this optimized:
442 * it precalculates size and copies many bytes
443 * in a loop with simpler checks, a-la:
445 * *(dest) = *(dest + ofs);
446 * while (++dest != lim);
449 * buffer[buffer_pos++] = buffer[pos];
450 * if (++pos == header.dict_size)
452 * } while (--cur_len != 0);
453 * Our code is slower (more checks per byte copy):
/* Copy len bytes from distance rep0 within the ring buffer */
457 uint32_t pos
= buffer_pos
- rep0
;
458 if ((int32_t)pos
< 0) {
459 pos
+= *(uint32_t*)(header
.dict_size_u32
);
460 /* bug 10436 has an example file where this triggers: */
461 //if ((int32_t)pos < 0)
463 /* more stringent test (see unzip_bad_lzma_1.zip): */
464 if (pos
>= buffer_size
)
467 previous_byte
= buffer
[pos
];
469 buffer
[buffer_pos
++] = previous_byte
;
/* Ring buffer full: flush one dictionary's worth of output */
470 if (buffer_pos
== *(uint32_t*)(header
.dict_size_u32
)) {
472 global_pos
+= *(uint32_t*)(header
.dict_size_u32
);
473 if (bb_archive_transformer_write(xstate
, buffer
, *(uint32_t*)(header
.dict_size_u32
)) != (ssize_t
)(*(uint32_t*)(header
.dict_size_u32
)))
475 total_written
+= *(uint32_t*)(header
.dict_size_u32
);
478 } while (len
!= 0 && buffer_pos
< (*(uint64_t*)(header
.dst_size_u64
)));
479 /* FIXME: ...........^^^^^
480 * shouldn't it be "global_pos + buffer_pos < header.dst_size"?
481 * It probably should, but it is a "do we accidentally
482 * unpack more bytes than expected?" check - which
483 * never happens for well-formed compression data...
/* Flush whatever remains in the ring buffer after the main loop */
489 total_written
+= buffer_pos
;
490 if (bb_archive_transformer_write(xstate
, buffer
, buffer_pos
) != (ssize_t
)buffer_pos
) {
492 /* One of our users, bbunpack(), expects _us_ to emit
493 * the error message (since it's the best place to give
494 * potentially more detailed information).
495 * Do not fail silently.
497 bb_simple_error_msg("corrupted data");
498 total_written
= -1; /* failure */
500 bb_archive_unlzma_rc_free(rc
);
503 return total_written
;
509 #undef RC_BUFFER_SIZE
512 #undef RC_MODEL_TOTAL_BITS