///////////////////////////////////////////////////////////////////////////////
//
/// \file       lzma_encoder_optimum_normal.c
//
//  This file has been put into the public domain.
//  You can do whatever you want with this file.
//
///////////////////////////////////////////////////////////////////////////////

#include "lzma_encoder_private.h"
#include "fastpos.h"
#include "memcmplen.h"
static uint32_t
get_literal_price(const lzma_lzma1_encoder *const coder, const uint32_t pos,
		const uint32_t prev_byte, const bool match_mode,
		uint32_t match_byte, uint32_t symbol)
{
	const probability *const subcoder = literal_subcoder(coder->literal,
			coder->literal_context_bits, coder->literal_pos_mask,
			pos, prev_byte);

	uint32_t price;

	if (!match_mode) {
		price = rc_bittree_price(subcoder, 8, symbol);
	} else {
		uint32_t offset = 0x100;
		symbol += UINT32_C(1) << 8;

		price = 0;

		do {
			match_byte <<= 1;

			const uint32_t match_bit = match_byte & offset;
			const uint32_t subcoder_index
					= offset + match_bit + (symbol >> 8);
			const uint32_t bit = (symbol >> 7) & 1;

			price += rc_bit_price(subcoder[subcoder_index], bit);

			symbol <<= 1;
			offset &= ~(match_byte ^ symbol);

		} while (symbol < (UINT32_C(1) << 16));
	}

	return price;
}
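// NOTE: In the match_mode branch above, "offset" starts at 0x100 and stays
// nonzero only while the bits priced so far agree with the corresponding
// bits of match_byte. As soon as one bit differs, offset drops to zero and
// the remaining bits are priced with the plain literal probabilities. This
// mirrors how the range coder itself selects the probability set for a
// literal that follows a match.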
static inline uint32_t
get_len_price(const lzma_length_encoder *const lencoder,
		const uint32_t len, const uint32_t pos_state)
{
	// NOTE: Unlike the other price tables, length prices are updated
	// in lzma_encoder.c
	return lencoder->prices[pos_state][len - MATCH_LEN_MIN];
}
static inline uint32_t
get_short_rep_price(const lzma_lzma1_encoder *const coder,
		const lzma_lzma_state state, const uint32_t pos_state)
{
	return rc_bit_0_price(coder->is_rep0[state])
			+ rc_bit_0_price(coder->is_rep0_long[state][pos_state]);
}
static inline uint32_t
get_pure_rep_price(const lzma_lzma1_encoder *const coder, const uint32_t rep_index,
		const lzma_lzma_state state, uint32_t pos_state)
{
	uint32_t price;

	if (rep_index == 0) {
		price = rc_bit_0_price(coder->is_rep0[state]);
		price += rc_bit_1_price(coder->is_rep0_long[state][pos_state]);
	} else {
		price = rc_bit_1_price(coder->is_rep0[state]);

		if (rep_index == 1) {
			price += rc_bit_0_price(coder->is_rep1[state]);
		} else {
			price += rc_bit_1_price(coder->is_rep1[state]);
			price += rc_bit_price(coder->is_rep2[state],
					rep_index - 2);
		}
	}

	return price;
}
static inline uint32_t
get_rep_price(const lzma_lzma1_encoder *const coder, const uint32_t rep_index,
		const uint32_t len, const lzma_lzma_state state,
		const uint32_t pos_state)
{
	return get_len_price(&coder->rep_len_encoder, len, pos_state)
			+ get_pure_rep_price(coder, rep_index, state, pos_state);
}
static inline uint32_t
get_dist_len_price(const lzma_lzma1_encoder *const coder, const uint32_t dist,
		const uint32_t len, const uint32_t pos_state)
{
	const uint32_t dist_state = get_dist_state(len);
	uint32_t price;

	if (dist < FULL_DISTANCES) {
		price = coder->dist_prices[dist_state][dist];
	} else {
		const uint32_t dist_slot = get_dist_slot_2(dist);
		price = coder->dist_slot_prices[dist_state][dist_slot]
				+ coder->align_prices[dist & ALIGN_MASK];
	}

	price += get_len_price(&coder->match_len_encoder, len, pos_state);

	return price;
}
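// NOTE: For example, dist == 100 belongs to dist_slot 13: footer_bits is
// (13 >> 1) - 1 == 5 and base is (2 | (13 & 1)) << 5 == 96, so the five
// low bits encode 100 - 96 == 4 with the reverse bittree probabilities in
// coder->dist_special. Because 100 < FULL_DISTANCES, that combined price is
// read directly from the dist_prices table built by fill_dist_prices() below.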
static void
fill_dist_prices(lzma_lzma1_encoder *coder)
{
	for (uint32_t dist_state = 0; dist_state < DIST_STATES; ++dist_state) {

		uint32_t *const dist_slot_prices
				= coder->dist_slot_prices[dist_state];

		// Price to encode the dist_slot.
		for (uint32_t dist_slot = 0;
				dist_slot < coder->dist_table_size; ++dist_slot)
			dist_slot_prices[dist_slot] = rc_bittree_price(
					coder->dist_slot[dist_state],
					DIST_SLOT_BITS, dist_slot);

		// For matches with distance >= FULL_DISTANCES, add the price
		// of the direct bits part of the match distance. (Align bits
		// are handled by fill_align_prices()).
		for (uint32_t dist_slot = DIST_MODEL_END;
				dist_slot < coder->dist_table_size;
				++dist_slot)
			dist_slot_prices[dist_slot] += rc_direct_price(
					((dist_slot >> 1) - 1) - ALIGN_BITS);

		// Distances in the range [0, 3] are fully encoded with
		// dist_slot, so they are used for coder->dist_prices
		// as is.
		for (uint32_t i = 0; i < DIST_MODEL_START; ++i)
			coder->dist_prices[dist_state][i]
					= dist_slot_prices[i];
	}

	// Distances in the range [4, 127] depend on dist_slot and
	// dist_special. We do this in a loop separate from the above
	// loop to avoid redundant calls to get_dist_slot().
	for (uint32_t i = DIST_MODEL_START; i < FULL_DISTANCES; ++i) {
		const uint32_t dist_slot = get_dist_slot(i);
		const uint32_t footer_bits = ((dist_slot >> 1) - 1);
		const uint32_t base = (2 | (dist_slot & 1)) << footer_bits;
		const uint32_t price = rc_bittree_reverse_price(
				coder->dist_special + base - dist_slot - 1,
				footer_bits, i - base);

		for (uint32_t dist_state = 0; dist_state < DIST_STATES;
				++dist_state)
			coder->dist_prices[dist_state][i]
					= price + coder->dist_slot_prices[
						dist_state][dist_slot];
	}

	coder->match_price_count = 0;
	return;
}
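// NOTE: The tables built above are refreshed lazily: lzma_lzma_optimum_normal()
// calls fill_dist_prices() again only once match_price_count has grown past
// its threshold, so the cached prices approximate the current probabilities
// rather than tracking every probability update exactly.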
static void
fill_align_prices(lzma_lzma1_encoder *coder)
{
	for (uint32_t i = 0; i < ALIGN_SIZE; ++i)
		coder->align_prices[i] = rc_bittree_reverse_price(
				coder->dist_align, ALIGN_BITS, i);

	coder->align_price_count = 0;
	return;
}
static inline void
make_literal(lzma_optimal *optimal)
{
	optimal->back_prev = UINT32_MAX;
	optimal->prev_1_is_literal = false;
}
static inline void
make_short_rep(lzma_optimal *optimal)
{
	optimal->back_prev = 0;
	optimal->prev_1_is_literal = false;
}
#define is_short_rep(optimal) \
	((optimal).back_prev == 0)
static void
backward(lzma_lzma1_encoder *restrict coder, uint32_t *restrict len_res,
		uint32_t *restrict back_res, uint32_t cur)
{
	coder->opts_end_index = cur;

	uint32_t pos_mem = coder->opts[cur].pos_prev;
	uint32_t back_mem = coder->opts[cur].back_prev;

	do {
		if (coder->opts[cur].prev_1_is_literal) {
			make_literal(&coder->opts[pos_mem]);
			coder->opts[pos_mem].pos_prev = pos_mem - 1;

			if (coder->opts[cur].prev_2) {
				coder->opts[pos_mem - 1].prev_1_is_literal
						= false;
				coder->opts[pos_mem - 1].pos_prev
						= coder->opts[cur].pos_prev_2;
				coder->opts[pos_mem - 1].back_prev
						= coder->opts[cur].back_prev_2;
			}
		}

		const uint32_t pos_prev = pos_mem;
		const uint32_t back_cur = back_mem;

		back_mem = coder->opts[pos_prev].back_prev;
		pos_mem = coder->opts[pos_prev].pos_prev;

		coder->opts[pos_prev].back_prev = back_cur;
		coder->opts[pos_prev].pos_prev = cur;
		cur = pos_prev;

	} while (cur != 0);

	coder->opts_current_index = coder->opts[0].pos_prev;
	*len_res = coder->opts[0].pos_prev;
	*back_res = coder->opts[0].back_prev;

	return;
}
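// NOTE: backward() above walks the chosen path from opts[cur] back to
// opts[0] and reverses the pos_prev links in place. After the call,
// opts[0].pos_prev holds the length of the first symbol to emit and
// opts[0].back_prev its back value; lzma_lzma_optimum_normal() returns
// these first and then hands out the remaining symbols one per call until
// opts_current_index reaches opts_end_index.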
static inline uint32_t
helper1(lzma_lzma1_encoder *restrict coder, lzma_mf *restrict mf,
		uint32_t *restrict back_res, uint32_t *restrict len_res,
		uint32_t position)
{
	const uint32_t nice_len = mf->nice_len;

	uint32_t len_main;
	uint32_t matches_count;

	if (mf->read_ahead == 0) {
		len_main = mf_find(mf, &matches_count, coder->matches);
	} else {
		assert(mf->read_ahead == 1);
		len_main = coder->longest_match_length;
		matches_count = coder->matches_count;
	}

	const uint32_t buf_avail = my_min(mf_avail(mf) + 1, MATCH_LEN_MAX);
	if (buf_avail < 2) {
		*back_res = UINT32_MAX;
		*len_res = 1;
		return UINT32_MAX;
	}

	const uint8_t *const buf = mf_ptr(mf) - 1;

	uint32_t rep_lens[REPS];
	uint32_t rep_max_index = 0;

	for (uint32_t i = 0; i < REPS; ++i) {
		const uint8_t *const buf_back = buf - coder->reps[i] - 1;

		if (not_equal_16(buf, buf_back)) {
			rep_lens[i] = 0;
			continue;
		}

		rep_lens[i] = lzma_memcmplen(buf, buf_back, 2, buf_avail);

		if (rep_lens[i] > rep_lens[rep_max_index])
			rep_max_index = i;
	}

	if (rep_lens[rep_max_index] >= nice_len) {
		*back_res = rep_max_index;
		*len_res = rep_lens[rep_max_index];
		mf_skip(mf, *len_res - 1);
		return UINT32_MAX;
	}

	if (len_main >= nice_len) {
		*back_res = coder->matches[matches_count - 1].dist + REPS;
		*len_res = len_main;
		mf_skip(mf, len_main - 1);
		return UINT32_MAX;
	}

	const uint8_t current_byte = *buf;
	const uint8_t match_byte = *(buf - coder->reps[0] - 1);

	if (len_main < 2 && current_byte != match_byte
			&& rep_lens[rep_max_index] < 2) {
		*back_res = UINT32_MAX;
		*len_res = 1;
		return UINT32_MAX;
	}

	coder->opts[0].state = coder->state;

	const uint32_t pos_state = position & coder->pos_mask;

	coder->opts[1].price = rc_bit_0_price(
				coder->is_match[coder->state][pos_state])
			+ get_literal_price(coder, position, buf[-1],
				!is_literal_state(coder->state),
				match_byte, current_byte);

	make_literal(&coder->opts[1]);

	const uint32_t match_price = rc_bit_1_price(
			coder->is_match[coder->state][pos_state]);
	const uint32_t rep_match_price = match_price
			+ rc_bit_1_price(coder->is_rep[coder->state]);

	if (match_byte == current_byte) {
		const uint32_t short_rep_price = rep_match_price
				+ get_short_rep_price(
					coder, coder->state, pos_state);

		if (short_rep_price < coder->opts[1].price) {
			coder->opts[1].price = short_rep_price;
			make_short_rep(&coder->opts[1]);
		}
	}

	const uint32_t len_end = my_max(len_main, rep_lens[rep_max_index]);

	if (len_end < 2) {
		*back_res = coder->opts[1].back_prev;
		*len_res = 1;
		return UINT32_MAX;
	}

	coder->opts[1].pos_prev = 0;

	for (uint32_t i = 0; i < REPS; ++i)
		coder->opts[0].backs[i] = coder->reps[i];

	uint32_t len = len_end;
	do {
		coder->opts[len].price = RC_INFINITY_PRICE;
	} while (--len >= 2);

	for (uint32_t i = 0; i < REPS; ++i) {
		uint32_t rep_len = rep_lens[i];
		if (rep_len < 2)
			continue;

		const uint32_t price = rep_match_price + get_pure_rep_price(
				coder, i, coder->state, pos_state);

		do {
			const uint32_t cur_and_len_price = price
					+ get_len_price(
						&coder->rep_len_encoder,
						rep_len, pos_state);

			if (cur_and_len_price < coder->opts[rep_len].price) {
				coder->opts[rep_len].price = cur_and_len_price;
				coder->opts[rep_len].pos_prev = 0;
				coder->opts[rep_len].back_prev = i;
				coder->opts[rep_len].prev_1_is_literal = false;
			}
		} while (--rep_len >= 2);
	}

	const uint32_t normal_match_price = match_price
			+ rc_bit_0_price(coder->is_rep[coder->state]);

	len = rep_lens[0] >= 2 ? rep_lens[0] + 1 : 2;
	if (len <= len_main) {
		uint32_t i = 0;
		while (len > coder->matches[i].len)
			++i;

		for (;; ++len) {
			const uint32_t dist = coder->matches[i].dist;
			const uint32_t cur_and_len_price = normal_match_price
					+ get_dist_len_price(coder,
						dist, len, pos_state);

			if (cur_and_len_price < coder->opts[len].price) {
				coder->opts[len].price = cur_and_len_price;
				coder->opts[len].pos_prev = 0;
				coder->opts[len].back_prev = dist + REPS;
				coder->opts[len].prev_1_is_literal = false;
			}

			if (len == coder->matches[i].len)
				if (++i == matches_count)
					break;
		}
	}

	return len_end;
}
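// NOTE: helper1() above handles the initialization step of the optimal
// parse: the trivial cases (too little input, or a rep/match that already
// reaches nice_len) are resolved immediately and signaled with UINT32_MAX;
// otherwise opts[1 .. len_end] is seeded with the cheapest literal, short
// rep, rep match, and normal match candidates starting at the current byte.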
static inline uint32_t
helper2(lzma_lzma1_encoder *coder, uint32_t *reps, const uint8_t *buf,
		uint32_t len_end, uint32_t position, const uint32_t cur,
		const uint32_t nice_len, const uint32_t buf_avail_full)
{
	uint32_t matches_count = coder->matches_count;
	uint32_t new_len = coder->longest_match_length;
	uint32_t pos_prev = coder->opts[cur].pos_prev;
	lzma_lzma_state state;

	if (coder->opts[cur].prev_1_is_literal) {
		--pos_prev;

		if (coder->opts[cur].prev_2) {
			state = coder->opts[coder->opts[cur].pos_prev_2].state;

			if (coder->opts[cur].back_prev_2 < REPS)
				update_long_rep(state);
			else
				update_match(state);

		} else {
			state = coder->opts[pos_prev].state;
		}

		update_literal(state);

	} else {
		state = coder->opts[pos_prev].state;
	}

	if (pos_prev == cur - 1) {
		if (is_short_rep(coder->opts[cur]))
			update_short_rep(state);
		else
			update_literal(state);
	} else {
		uint32_t pos;
		if (coder->opts[cur].prev_1_is_literal
				&& coder->opts[cur].prev_2) {
			pos_prev = coder->opts[cur].pos_prev_2;
			pos = coder->opts[cur].back_prev_2;
			update_long_rep(state);
		} else {
			pos = coder->opts[cur].back_prev;
			if (pos < REPS)
				update_long_rep(state);
			else
				update_match(state);
		}

		if (pos < REPS) {
			reps[0] = coder->opts[pos_prev].backs[pos];

			uint32_t i;
			for (i = 1; i <= pos; ++i)
				reps[i] = coder->opts[pos_prev].backs[i - 1];

			for (; i < REPS; ++i)
				reps[i] = coder->opts[pos_prev].backs[i];

		} else {
			reps[0] = pos - REPS;

			for (uint32_t i = 1; i < REPS; ++i)
				reps[i] = coder->opts[pos_prev].backs[i - 1];
		}
	}

	coder->opts[cur].state = state;

	for (uint32_t i = 0; i < REPS; ++i)
		coder->opts[cur].backs[i] = reps[i];

	const uint32_t cur_price = coder->opts[cur].price;

	const uint8_t current_byte = *buf;
	const uint8_t match_byte = *(buf - reps[0] - 1);

	const uint32_t pos_state = position & coder->pos_mask;

	const uint32_t cur_and_1_price = cur_price
			+ rc_bit_0_price(coder->is_match[state][pos_state])
			+ get_literal_price(coder, position, buf[-1],
			!is_literal_state(state), match_byte, current_byte);

	bool next_is_literal = false;

	if (cur_and_1_price < coder->opts[cur + 1].price) {
		coder->opts[cur + 1].price = cur_and_1_price;
		coder->opts[cur + 1].pos_prev = cur;
		make_literal(&coder->opts[cur + 1]);
		next_is_literal = true;
	}

	const uint32_t match_price = cur_price
			+ rc_bit_1_price(coder->is_match[state][pos_state]);
	const uint32_t rep_match_price = match_price
			+ rc_bit_1_price(coder->is_rep[state]);

	if (match_byte == current_byte
			&& !(coder->opts[cur + 1].pos_prev < cur
				&& coder->opts[cur + 1].back_prev == 0)) {

		const uint32_t short_rep_price = rep_match_price
				+ get_short_rep_price(coder, state, pos_state);

		if (short_rep_price <= coder->opts[cur + 1].price) {
			coder->opts[cur + 1].price = short_rep_price;
			coder->opts[cur + 1].pos_prev = cur;
			make_short_rep(&coder->opts[cur + 1]);
			next_is_literal = true;
		}
	}

	if (buf_avail_full < 2)
		return len_end;

	const uint32_t buf_avail = my_min(buf_avail_full, nice_len);

	if (!next_is_literal && match_byte != current_byte) { // speed optimization
		// try literal + rep0
		const uint8_t *const buf_back = buf - reps[0] - 1;
		const uint32_t limit = my_min(buf_avail_full, nice_len + 1);

		const uint32_t len_test = lzma_memcmplen(buf, buf_back, 1, limit) - 1;

		if (len_test >= 2) {
			lzma_lzma_state state_2 = state;
			update_literal(state_2);

			const uint32_t pos_state_next = (position + 1) & coder->pos_mask;
			const uint32_t next_rep_match_price = cur_and_1_price
					+ rc_bit_1_price(coder->is_match[state_2][pos_state_next])
					+ rc_bit_1_price(coder->is_rep[state_2]);

			//for (; len_test >= 2; --len_test) {
			const uint32_t offset = cur + 1 + len_test;

			while (len_end < offset)
				coder->opts[++len_end].price = RC_INFINITY_PRICE;

			const uint32_t cur_and_len_price = next_rep_match_price
					+ get_rep_price(coder, 0, len_test,
						state_2, pos_state_next);

			if (cur_and_len_price < coder->opts[offset].price) {
				coder->opts[offset].price = cur_and_len_price;
				coder->opts[offset].pos_prev = cur + 1;
				coder->opts[offset].back_prev = 0;
				coder->opts[offset].prev_1_is_literal = true;
				coder->opts[offset].prev_2 = false;
			}
			//}
		}
	}

	uint32_t start_len = 2; // speed optimization

	for (uint32_t rep_index = 0; rep_index < REPS; ++rep_index) {
		const uint8_t *const buf_back = buf - reps[rep_index] - 1;
		if (not_equal_16(buf, buf_back))
			continue;

		uint32_t len_test = lzma_memcmplen(buf, buf_back, 2, buf_avail);

		while (len_end < cur + len_test)
			coder->opts[++len_end].price = RC_INFINITY_PRICE;

		const uint32_t len_test_temp = len_test;
		const uint32_t price = rep_match_price + get_pure_rep_price(
				coder, rep_index, state, pos_state);

		do {
			const uint32_t cur_and_len_price = price
					+ get_len_price(&coder->rep_len_encoder,
							len_test, pos_state);

			if (cur_and_len_price < coder->opts[cur + len_test].price) {
				coder->opts[cur + len_test].price = cur_and_len_price;
				coder->opts[cur + len_test].pos_prev = cur;
				coder->opts[cur + len_test].back_prev = rep_index;
				coder->opts[cur + len_test].prev_1_is_literal = false;
			}
		} while (--len_test >= 2);

		len_test = len_test_temp;

		if (rep_index == 0)
			start_len = len_test + 1;

		uint32_t len_test_2 = len_test + 1;
		const uint32_t limit = my_min(buf_avail_full,
				len_test_2 + nice_len);
		// NOTE: len_test_2 may be greater than limit so the call to
		// lzma_memcmplen() must be done conditionally.
		if (len_test_2 < limit)
			len_test_2 = lzma_memcmplen(buf, buf_back, len_test_2, limit);

		len_test_2 -= len_test + 1;

		if (len_test_2 >= 2) {
			lzma_lzma_state state_2 = state;
			update_long_rep(state_2);

			uint32_t pos_state_next = (position + len_test) & coder->pos_mask;

			const uint32_t cur_and_len_literal_price = price
					+ get_len_price(&coder->rep_len_encoder,
						len_test, pos_state)
					+ rc_bit_0_price(coder->is_match[state_2][pos_state_next])
					+ get_literal_price(coder, position + len_test,
						buf[len_test - 1], true,
						buf_back[len_test], buf[len_test]);

			update_literal(state_2);

			pos_state_next = (position + len_test + 1) & coder->pos_mask;

			const uint32_t next_rep_match_price = cur_and_len_literal_price
					+ rc_bit_1_price(coder->is_match[state_2][pos_state_next])
					+ rc_bit_1_price(coder->is_rep[state_2]);

			//for(; len_test_2 >= 2; len_test_2--) {
			const uint32_t offset = cur + len_test + 1 + len_test_2;

			while (len_end < offset)
				coder->opts[++len_end].price = RC_INFINITY_PRICE;

			const uint32_t cur_and_len_price = next_rep_match_price
					+ get_rep_price(coder, 0, len_test_2,
						state_2, pos_state_next);

			if (cur_and_len_price < coder->opts[offset].price) {
				coder->opts[offset].price = cur_and_len_price;
				coder->opts[offset].pos_prev = cur + len_test + 1;
				coder->opts[offset].back_prev = 0;
				coder->opts[offset].prev_1_is_literal = true;
				coder->opts[offset].prev_2 = true;
				coder->opts[offset].pos_prev_2 = cur;
				coder->opts[offset].back_prev_2 = rep_index;
			}
			//}
		}
	}

	//for (uint32_t len_test = 2; len_test <= new_len; ++len_test)
	if (new_len > buf_avail) {
		new_len = buf_avail;
		matches_count = 0;
		while (new_len > coder->matches[matches_count].len)
			++matches_count;

		coder->matches[matches_count++].len = new_len;
	}

	if (new_len >= start_len) {
		const uint32_t normal_match_price = match_price
				+ rc_bit_0_price(coder->is_rep[state]);

		while (len_end < cur + new_len)
			coder->opts[++len_end].price = RC_INFINITY_PRICE;

		uint32_t i = 0;
		while (start_len > coder->matches[i].len)
			++i;

		for (uint32_t len_test = start_len; ; ++len_test) {
			const uint32_t cur_back = coder->matches[i].dist;
			uint32_t cur_and_len_price = normal_match_price
					+ get_dist_len_price(coder,
						cur_back, len_test, pos_state);

			if (cur_and_len_price < coder->opts[cur + len_test].price) {
				coder->opts[cur + len_test].price = cur_and_len_price;
				coder->opts[cur + len_test].pos_prev = cur;
				coder->opts[cur + len_test].back_prev
						= cur_back + REPS;
				coder->opts[cur + len_test].prev_1_is_literal = false;
			}

			if (len_test == coder->matches[i].len) {
				// Try Match + Literal + Rep0
				const uint8_t *const buf_back = buf - cur_back - 1;
				uint32_t len_test_2 = len_test + 1;
				const uint32_t limit = my_min(buf_avail_full,
						len_test_2 + nice_len);

				// NOTE: len_test_2 may be greater than limit
				// so the call to lzma_memcmplen() must be
				// done conditionally.
				if (len_test_2 < limit)
					len_test_2 = lzma_memcmplen(buf, buf_back,
							len_test_2, limit);

				len_test_2 -= len_test + 1;

				if (len_test_2 >= 2) {
					lzma_lzma_state state_2 = state;
					update_match(state_2);
					uint32_t pos_state_next
							= (position + len_test) & coder->pos_mask;

					const uint32_t cur_and_len_literal_price = cur_and_len_price
							+ rc_bit_0_price(
								coder->is_match[state_2][pos_state_next])
							+ get_literal_price(coder,
								position + len_test,
								buf[len_test - 1], true,
								buf_back[len_test],
								buf[len_test]);

					update_literal(state_2);
					pos_state_next = (pos_state_next + 1) & coder->pos_mask;

					const uint32_t next_rep_match_price
							= cur_and_len_literal_price
							+ rc_bit_1_price(
								coder->is_match[state_2][pos_state_next])
							+ rc_bit_1_price(coder->is_rep[state_2]);

					// for(; len_test_2 >= 2; --len_test_2) {
					const uint32_t offset = cur + len_test + 1 + len_test_2;

					while (len_end < offset)
						coder->opts[++len_end].price = RC_INFINITY_PRICE;

					cur_and_len_price = next_rep_match_price
							+ get_rep_price(coder, 0, len_test_2,
								state_2, pos_state_next);

					if (cur_and_len_price < coder->opts[offset].price) {
						coder->opts[offset].price = cur_and_len_price;
						coder->opts[offset].pos_prev = cur + len_test + 1;
						coder->opts[offset].back_prev = 0;
						coder->opts[offset].prev_1_is_literal = true;
						coder->opts[offset].prev_2 = true;
						coder->opts[offset].pos_prev_2 = cur;
						coder->opts[offset].back_prev_2
								= cur_back + REPS;
					}
					//}
				}

				if (++i == matches_count)
					break;
			}
		}
	}

	return len_end;
}
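// NOTE: helper2() above performs one step of the dynamic programming: it
// first recovers the coder state and rep distances along the cheapest path
// to opts[cur], then tries to extend that path with a literal, a short rep,
// literal + rep0, each rep match, and each normal match (optionally
// followed by literal + rep0), lowering the prices of later positions
// whenever a cheaper path is found.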
extern void
lzma_lzma_optimum_normal(lzma_lzma1_encoder *restrict coder,
		lzma_mf *restrict mf,
		uint32_t *restrict back_res, uint32_t *restrict len_res,
		uint32_t position)
{
	// If we have symbols pending, return the next pending symbol.
	if (coder->opts_end_index != coder->opts_current_index) {
		assert(mf->read_ahead > 0);
		*len_res = coder->opts[coder->opts_current_index].pos_prev
				- coder->opts_current_index;
		*back_res = coder->opts[coder->opts_current_index].back_prev;
		coder->opts_current_index = coder->opts[
				coder->opts_current_index].pos_prev;
		return;
	}

	// Update the price tables. In LZMA SDK <= 4.60 (and possibly later)
	// this was done in both initialization function and in the main loop.
	// In liblzma they were moved into this single place.
	if (mf->read_ahead == 0) {
		if (coder->match_price_count >= (1 << 7))
			fill_dist_prices(coder);

		if (coder->align_price_count >= ALIGN_SIZE)
			fill_align_prices(coder);
	}

	// TODO: This needs quite a bit of cleaning still. But splitting
	// the original function into two pieces makes it at least a little
	// more readable, since those two parts don't share many variables.

	uint32_t len_end = helper1(coder, mf, back_res, len_res, position);
	if (len_end == UINT32_MAX)
		return;

	uint32_t reps[REPS];
	memcpy(reps, coder->reps, sizeof(reps));

	uint32_t cur;
	for (cur = 1; cur < len_end; ++cur) {
		assert(cur < OPTS);

		coder->longest_match_length = mf_find(
				mf, &coder->matches_count, coder->matches);

		if (coder->longest_match_length >= mf->nice_len)
			break;

		len_end = helper2(coder, reps, mf_ptr(mf) - 1, len_end,
				position + cur, cur, mf->nice_len,
				my_min(mf_avail(mf) + 1, OPTS - 1 - cur));
	}

	backward(coder, len_res, back_res, cur);
	return;
}
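// NOTE: On return, *len_res and *back_res describe the next symbol for the
// caller in lzma_encoder.c: back_res == UINT32_MAX means a literal,
// back_res < REPS selects a repeated distance, and larger values mean a
// normal match with distance back_res - REPS.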