/* The matches and literals that the parser has chosen for the current
* block. The required length of this array is limited by the maximum
- * number of matches that can ever be chosen for a single block. */
- struct lzx_sequence chosen_sequences[DIV_ROUND_UP(LZX_DIV_BLOCK_SIZE, LZX_MIN_MATCH_LEN)];
+ * number of matches that can ever be chosen for a single block, plus
+ * one for the special entry at the end. */
+ struct lzx_sequence chosen_sequences[
+ DIV_ROUND_UP(LZX_DIV_BLOCK_SIZE, LZX_MIN_MATCH_LEN) + 1];
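/* For example, with the usual values LZX_DIV_BLOCK_SIZE == 32768 and
 * LZX_MIN_MATCH_LEN == 2, this works out to 16384 + 1 == 16385 entries:
 * a block can never contain more than 32768/2 matches, plus the one
 * terminating entry. */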
/* Tables for mapping adjusted offsets to offset slots */
/* Can the specified number of bits always be added to 'bitbuf' after any
* pending 16-bit coding units have been flushed? */
-#define CAN_BUFFER(n) ((n) <= (8 * sizeof(machine_word_t)) - 16)
+#define CAN_BUFFER(n) ((n) <= (8 * sizeof(machine_word_t)) - 15)
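/* Illustrative check: flushing drains 16-bit coding units while at
 * least 16 bits are pending, so at most 15 bits can remain buffered
 * afterwards; hence the '- 15'. With a 64-bit machine_word_t, for
 * example, CAN_BUFFER(49) holds but CAN_BUFFER(50) does not, since
 * 8 * 8 - 15 == 49. */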
/*
* Initialize the output bitstream.
STATIC_ASSERT(MAIN_CODEWORD_LIMIT >= 9 &&
MAIN_CODEWORD_LIMIT <= LZX_MAX_MAIN_CODEWORD_LEN);
- STATIC_ASSERT(LENGTH_CODEWORD_LIMIT >= 9 &&
+ STATIC_ASSERT(LENGTH_CODEWORD_LIMIT >= 8 &&
LENGTH_CODEWORD_LIMIT <= LZX_MAX_LEN_CODEWORD_LEN);
STATIC_ASSERT(ALIGNED_CODEWORD_LIMIT >= LZX_NUM_ALIGNED_OFFSET_BITS &&
ALIGNED_CODEWORD_LIMIT <= LZX_MAX_ALIGNED_CODEWORD_LEN);
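/* These lower bounds come from the alphabet sizes: building a
 * depth-limited Huffman code over K symbols requires a length limit of
 * at least ceil(log2(K)). The length code has LZX_LENCODE_NUM_SYMBOLS
 * == 249 symbols, and ceil(log2(249)) == 8, which is why a lower bound
 * of 8 suffices for LENGTH_CODEWORD_LIMIT. */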
if (!CAN_BUFFER(MAX_MATCH_BITS))
lzx_flush_bits(os, ALIGNED_CODEWORD_LIMIT);
} else {
+ STATIC_ASSERT(CAN_BUFFER(17));
+
lzx_add_bits(os, extra_bits, num_extra_bits);
if (!CAN_BUFFER(MAX_MATCH_BITS))
lzx_flush_bits(os, 17);
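/* 17 is the largest number of extra offset bits any LZX offset slot
 * requires, so the CAN_BUFFER(17) assertion above guarantees that the
 * extra bits of any single match always fit after a flush. */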
* of coding the literal is integrated into the queue update
* code below. */
literal = *in_next++;
- cost = cur_node->cost +
- c->costs.main[lzx_main_symbol_for_literal(literal)];
+ cost = cur_node->cost + c->costs.main[literal];
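/* This simplification relies on the LZX main alphabet layout: literals
 * occupy main symbols 0 through 255, so lzx_main_symbol_for_literal()
 * was just the identity mapping. */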
/* Advance to the next position. */
cur_node++;
static void
lzx_compute_match_costs(struct lzx_compressor *c)
{
- unsigned num_offset_slots = lzx_get_num_offset_slots(c->window_order);
+ unsigned num_offset_slots = (c->num_main_syms - LZX_NUM_CHARS) /
+			      LZX_NUM_LEN_HEADERS;
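/* This inverts the main-alphabet size relation: num_main_syms ==
 * LZX_NUM_CHARS + num_offset_slots * LZX_NUM_LEN_HEADERS, i.e.
 * 256 + slots * 8. For example, 496 main symbols implies the classic
 * 30 offset slots of a 32768-byte window. */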
struct lzx_costs *costs = &c->costs;
for (unsigned offset_slot = 0; offset_slot < num_offset_slots; offset_slot++) {
u32 extra_cost = (u32)lzx_extra_offset_bits[offset_slot] * LZX_BIT_COST;
- unsigned main_symbol = lzx_main_symbol_for_match(offset_slot, 0);
+ unsigned main_symbol = LZX_NUM_CHARS + (offset_slot * LZX_NUM_LEN_HEADERS);
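/* Example: with LZX_NUM_LEN_HEADERS == 8, offset slot 2 starts at main
 * symbol 256 + 2*8 == 272, and the next 8 consecutive symbols cover
 * that slot's 8 length headers. */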
unsigned i;
#if LZX_CONSIDER_ALIGNED_COSTS
unsigned i;
const struct lzx_lens *lens = &c->codes[c->codes_index].lens;
- for (i = 0; i < c->num_main_syms; i++)
- c->costs.main[i] = (lens->main[i] ? lens->main[i] : 15) * LZX_BIT_COST;
+ for (i = 0; i < c->num_main_syms; i++) {
+ c->costs.main[i] = (lens->main[i] ? lens->main[i] :
+ MAIN_CODEWORD_LIMIT) * LZX_BIT_COST;
+ }
- for (i = 0; i < LZX_LENCODE_NUM_SYMBOLS; i++)
- c->costs.len[i] = (lens->len[i] ? lens->len[i] : 15) * LZX_BIT_COST;
+ for (i = 0; i < LZX_LENCODE_NUM_SYMBOLS; i++) {
+ c->costs.len[i] = (lens->len[i] ? lens->len[i] :
+ LENGTH_CODEWORD_LIMIT) * LZX_BIT_COST;
+ }
#if LZX_CONSIDER_ALIGNED_COSTS
- for (i = 0; i < LZX_ALIGNEDCODE_NUM_SYMBOLS; i++)
- c->costs.aligned[i] = (lens->aligned[i] ? lens->aligned[i] : 7) * LZX_BIT_COST;
+ for (i = 0; i < LZX_ALIGNEDCODE_NUM_SYMBOLS; i++) {
+ c->costs.aligned[i] = (lens->aligned[i] ? lens->aligned[i] :
+ ALIGNED_CODEWORD_LIMIT) * LZX_BIT_COST;
+ }
#endif
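/* Falling back to each code's codeword length limit for unused symbols
 * (rather than the hardcoded 15, 15, and 7) keeps the cost model
 * consistent: if an unused symbol is later chosen, its real codeword
 * can never exceed that limit. */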
lzx_compute_match_costs(c);
const u8 * const in_begin = c->in_buffer;
const u8 * in_next = in_begin;
const u8 * const in_end = in_begin + c->in_nbytes;
- unsigned max_len = LZX_MAX_MATCH_LEN;
- unsigned nice_len = min(c->nice_match_length, max_len);
- u32 next_hash = 0;
+ u32 max_len = LZX_MAX_MATCH_LEN;
+ u32 nice_len = min(c->nice_match_length, max_len);
+ u32 next_hashes[2] = {0};
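/* Two hashes are now carried between iterations, presumably one per
 * matchfinder table, so the hashes for the next position can be
 * computed and prefetched ahead of the lookup. */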
struct lzx_lru_queue queue;
CALL_BT_MF(is_16_bit, c, bt_matchfinder_init);
struct lz_match *cache_ptr = c->match_cache;
do {
struct lz_match *lz_matchptr;
- unsigned best_len;
+ u32 best_len;
/* If approaching the end of the input buffer, adjust
* 'max_len' and 'nice_len' accordingly. */
if (unlikely(max_len > in_end - in_next)) {
max_len = in_end - in_next;
nice_len = min(max_len, nice_len);
-
- /* This extra check is needed to ensure that we
- * never output a length 2 match of the very
- * last two bytes with the very first two bytes,
- * since such a match has an offset too large to
- * be represented. */
- if (unlikely(max_len < 3)) {
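+ /* Too few bytes remain for the matchfinder to index this
+  * position, so record that it has no matches and move on. */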
+ if (unlikely(max_len < BT_MATCHFINDER_REQUIRED_NBYTES)) {
in_next++;
cache_ptr->length = 0;
cache_ptr++;
max_len,
nice_len,
c->max_search_depth,
- &next_hash,
+ next_hashes,
&best_len,
cache_ptr + 1);
in_next++;
if (unlikely(max_len > in_end - in_next)) {
max_len = in_end - in_next;
nice_len = min(max_len, nice_len);
- if (unlikely(max_len < 3)) {
+ if (unlikely(max_len < BT_MATCHFINDER_REQUIRED_NBYTES)) {
in_next++;
cache_ptr->length = 0;
cache_ptr++;
max_len,
nice_len,
c->max_search_depth,
- &next_hash);
+ next_hashes);
in_next++;
cache_ptr->length = 0;
cache_ptr++;
c->impl = lzx_compress_lazy_16;
else
c->impl = lzx_compress_lazy_32;
- c->max_search_depth = (36 * compression_level) / 20;
- c->nice_match_length = (72 * compression_level) / 20;
+ c->max_search_depth = (60 * compression_level) / 20;
+ c->nice_match_length = (80 * compression_level) / 20;
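/* For example, at wimlib's default compression level of 50, this raises
 * max_search_depth from 90 to 150 and nice_match_length from 180 to
 * 200. */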
/* lzx_compress_lazy() needs max_search_depth >= 2 because it
* halves the max_search_depth when attempting a lazy match, and
/* Scale nice_match_length and max_search_depth with the
* compression level. */
c->max_search_depth = (24 * compression_level) / 50;
- c->nice_match_length = (32 * compression_level) / 50;
+ c->nice_match_length = (48 * compression_level) / 50;
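/* E.g. at compression level 100 this gives max_search_depth == 48 and
 * nice_match_length == 96 (previously 64). */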
/* Set a number of optimization passes appropriate for the
* compression level. */
else
memcpy(c->in_buffer, in, in_nbytes);
c->in_nbytes = in_nbytes;
- lzx_do_e8_preprocessing(c->in_buffer, in_nbytes);
+ lzx_preprocess(c->in_buffer, in_nbytes);
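/* The renamed helpers still implement LZX's x86 "E8" filter, which
 * rewrites the relative targets of 0xE8 call instructions as absolute
 * targets so that repeated calls to the same function become repeated
 * byte patterns. A rough sketch of the forward direction (illustrative,
 * not the exact wimlib logic), for an 0xE8 byte at input position
 * 'pos':
 *
 *	s32 rel = get_unaligned_le32(p + 1);
 *	if (rel >= -pos && rel < LZX_WIM_MAGIC_FILESIZE)
 *		put_unaligned_le32(rel + pos, p + 1);
 *
 * lzx_postprocess() applies the inverse translation. */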
/* Initially, the previous Huffman codeword lengths are all zeroes. */
c->codes_index = 0;
/* Flush the output bitstream and return the compressed size or 0. */
result = lzx_flush_output(&os);
if (!result && c->destructive)
- lzx_undo_e8_preprocessing(c->in_buffer, c->in_nbytes);
+ lzx_postprocess(c->in_buffer, c->in_nbytes);
return result;
}