lzx_compress.c: chosen_sequences[] length was 1 too short
diff --git a/src/lzx_compress.c b/src/lzx_compress.c
index d36f8aaa380ae747723f8e0f475862464af5145b..ec90ba1640cc0a11f18b1b848e5dfee28e915356 100644
--- a/src/lzx_compress.c
+++ b/src/lzx_compress.c
@@ -410,8 +410,10 @@ struct lzx_compressor {
 
        /* The matches and literals that the parser has chosen for the current
         * block.  The required length of this array is limited by the maximum
-        * number of matches that can ever be chosen for a single block.  */
-       struct lzx_sequence chosen_sequences[DIV_ROUND_UP(LZX_DIV_BLOCK_SIZE, LZX_MIN_MATCH_LEN)];
+        * number of matches that can ever be chosen for a single block, plus
+        * one for the special entry at the end.  */
+       struct lzx_sequence chosen_sequences[
+                      DIV_ROUND_UP(LZX_DIV_BLOCK_SIZE, LZX_MIN_MATCH_LEN) + 1];
 
        /* Tables for mapping adjusted offsets to offset slots  */
 
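The corrected bound can be checked with a little arithmetic. Assuming the usual wimlib constants (LZX_DIV_BLOCK_SIZE = 32768 and LZX_MIN_MATCH_LEN = 2; both are assumptions here, not taken from this diff), a block can contain at most 16384 matches, and the parser appends one terminating lzx_sequence after the last match, hence the "+ 1". A minimal sketch of the sizing in C:

	/* Sketch only: constants assumed, not copied from this diff.  */
	#define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))
	#define LZX_DIV_BLOCK_SIZE  32768   /* assumed block granularity       */
	#define LZX_MIN_MATCH_LEN   2       /* assumed minimum LZX match length */

	/* 16384 possible matches per block, plus the special end entry:  */
	_Static_assert(DIV_ROUND_UP(LZX_DIV_BLOCK_SIZE, LZX_MIN_MATCH_LEN) + 1 == 16385,
		       "chosen_sequences[] holds 16384 matches plus one end entry");
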
@@ -548,7 +550,7 @@ struct lzx_output_bitstream {
 
 /* Can the specified number of bits always be added to 'bitbuf' after any
  * pending 16-bit coding units have been flushed?  */
-#define CAN_BUFFER(n)  ((n) <= (8 * sizeof(machine_word_t)) - 16)
+#define CAN_BUFFER(n)  ((n) <= (8 * sizeof(machine_word_t)) - 15)
 
 /*
  * Initialize the output bitstream.
@@ -994,6 +996,8 @@ lzx_write_sequences(struct lzx_output_bitstream *os, int block_type,
                        if (!CAN_BUFFER(MAX_MATCH_BITS))
                                lzx_flush_bits(os, ALIGNED_CODEWORD_LIMIT);
                } else {
+                       STATIC_ASSERT(CAN_BUFFER(17));
+
                        lzx_add_bits(os, extra_bits, num_extra_bits);
                        if (!CAN_BUFFER(MAX_MATCH_BITS))
                                lzx_flush_bits(os, 17);
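
The CAN_BUFFER() change above and this new STATIC_ASSERT are two sides of the same arithmetic. Once all pending 16-bit coding units have been flushed, at most 15 bits remain in 'bitbuf' (a 16-bit remainder would itself be a pending unit), so n more bits always fit when n <= 8 * sizeof(machine_word_t) - 15; the old "- 16" bound was conservative by one bit. That one bit matters exactly in this branch, which buffers up to 17 bits before calling lzx_flush_bits(os, 17). A worked check (my arithmetic, not wimlib code):

	#include <stdint.h>   /* for uint32_t in this standalone check */

	/*   64-bit machine_word_t:  CAN_BUFFER(n)  <=>  n <= 64 - 15 = 49
	 *   32-bit machine_word_t:  CAN_BUFFER(n)  <=>  n <= 32 - 15 = 17
	 *
	 * so CAN_BUFFER(17) now holds even on 32-bit builds, which is what the
	 * new STATIC_ASSERT(CAN_BUFFER(17)) records; with the old "- 16" bound
	 * it would have read 17 <= 16 and failed there.  */
	_Static_assert(17 <= 8 * sizeof(uint32_t) - 15,
		       "17 buffered bits still fit after a flush with 32-bit words");
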
@@ -1585,8 +1589,7 @@ lzx_find_min_cost_path(struct lzx_compressor * const restrict c,
                 * of coding the literal is integrated into the queue update
                 * code below.  */
                literal = *in_next++;
-               cost = cur_node->cost +
-                      c->costs.main[lzx_main_symbol_for_literal(literal)];
+               cost = cur_node->cost + c->costs.main[literal];
 
                /* Advance to the next position.  */
                cur_node++;
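
The dropped helper was an identity: in the LZX main alphabet, the first LZX_NUM_CHARS symbols are the literal byte values themselves, so the main symbol for a literal is just the literal. A reconstruction of what lzx_main_symbol_for_literal() amounted to (for illustration, not copied from wimlib):

	/* Reconstruction for illustration: the first 256 main symbols are the
	 * literals, so the helper simply returned its argument.  */
	static inline unsigned
	lzx_main_symbol_for_literal(unsigned literal)
	{
		return literal;
	}
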
@@ -1626,13 +1629,13 @@ lzx_find_min_cost_path(struct lzx_compressor * const restrict c,
 static void
 lzx_compute_match_costs(struct lzx_compressor *c)
 {
-       unsigned num_offset_slots = lzx_get_num_offset_slots(c->window_order);
+       unsigned num_offset_slots = (c->num_main_syms - LZX_NUM_CHARS) / LZX_NUM_LEN_HEADERS;
        struct lzx_costs *costs = &c->costs;
 
        for (unsigned offset_slot = 0; offset_slot < num_offset_slots; offset_slot++) {
 
                u32 extra_cost = (u32)lzx_extra_offset_bits[offset_slot] * LZX_BIT_COST;
-               unsigned main_symbol = lzx_main_symbol_for_match(offset_slot, 0);
+               unsigned main_symbol = LZX_NUM_CHARS + (offset_slot * LZX_NUM_LEN_HEADERS);
                unsigned i;
 
        #if LZX_CONSIDER_ALIGNED_COSTS
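
Both replaced helpers fall out of the LZX main-alphabet layout: after the LZX_NUM_CHARS literal symbols come LZX_NUM_LEN_HEADERS match-header symbols for each offset slot, in slot order. Inverting that relation gives the new num_offset_slots expression, and the header block for a slot starts at LZX_NUM_CHARS + offset_slot * LZX_NUM_LEN_HEADERS. A sketch of the layout (LZX_NUM_CHARS = 256, LZX_NUM_LEN_HEADERS = 8, and the 30-slot figure are assumptions, not taken from this diff):

	/* Assumed alphabet layout:
	 *
	 *   num_main_syms = LZX_NUM_CHARS + num_offset_slots * LZX_NUM_LEN_HEADERS
	 *
	 * e.g. a 32 KiB window, with 30 offset slots, would give
	 * 256 + 30 * 8 = 496 main symbols, and offset slot 2 would own main
	 * symbols 256 + 2 * 8 = 272 through 279.  */
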
@@ -1710,15 +1713,21 @@ lzx_update_costs(struct lzx_compressor *c)
        unsigned i;
        const struct lzx_lens *lens = &c->codes[c->codes_index].lens;
 
-       for (i = 0; i < c->num_main_syms; i++)
-               c->costs.main[i] = (lens->main[i] ? lens->main[i] : 15) * LZX_BIT_COST;
+       for (i = 0; i < c->num_main_syms; i++) {
+               c->costs.main[i] = (lens->main[i] ? lens->main[i] :
+                                   MAIN_CODEWORD_LIMIT) * LZX_BIT_COST;
+       }
 
-       for (i = 0; i < LZX_LENCODE_NUM_SYMBOLS; i++)
-               c->costs.len[i] = (lens->len[i] ? lens->len[i] : 15) * LZX_BIT_COST;
+       for (i = 0; i < LZX_LENCODE_NUM_SYMBOLS; i++) {
+               c->costs.len[i] = (lens->len[i] ? lens->len[i] :
+                                  LENGTH_CODEWORD_LIMIT) * LZX_BIT_COST;
+       }
 
 #if LZX_CONSIDER_ALIGNED_COSTS
-       for (i = 0; i < LZX_ALIGNEDCODE_NUM_SYMBOLS; i++)
-               c->costs.aligned[i] = (lens->aligned[i] ? lens->aligned[i] : 7) * LZX_BIT_COST;
+       for (i = 0; i < LZX_ALIGNEDCODE_NUM_SYMBOLS; i++) {
+               c->costs.aligned[i] = (lens->aligned[i] ? lens->aligned[i] :
+                                      ALIGNED_CODEWORD_LIMIT) * LZX_BIT_COST;
+       }
 #endif
 
        lzx_compute_match_costs(c);
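
All three loops apply the same rule: a symbol that received a codeword keeps its length in bits as its cost, and a symbol the previous code did not use falls back to the maximum codeword length for that code, now spelled MAIN_CODEWORD_LIMIT / LENGTH_CODEWORD_LIMIT / ALIGNED_CODEWORD_LIMIT instead of the literals 15 and 7. The shared pattern, as an illustrative helper (not part of wimlib; u8, u32, and LZX_BIT_COST as used elsewhere in this file):

	/* Illustrative only: cost of one symbol in LZX_BIT_COST units, with
	 * unused symbols (len == 0) priced at the code's codeword-length limit
	 * so they remain representable but expensive.  */
	static u32
	lzx_symbol_cost_sketch(u8 len, unsigned codeword_limit)
	{
		return (u32)(len ? len : codeword_limit) * LZX_BIT_COST;
	}
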
@@ -1781,7 +1790,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
        const u8 * const in_end  = in_begin + c->in_nbytes;
        u32 max_len = LZX_MAX_MATCH_LEN;
        u32 nice_len = min(c->nice_match_length, max_len);
-       u32 next_hash = 0;
+       u32 next_hashes[2] = {};
        struct lzx_lru_queue queue;
 
        CALL_BT_MF(is_16_bit, c, bt_matchfinder_init);
@@ -1804,7 +1813,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                        if (unlikely(max_len > in_end - in_next)) {
                                max_len = in_end - in_next;
                                nice_len = min(max_len, nice_len);
-                               if (unlikely(max_len < 5)) {
+                               if (unlikely(max_len < BT_MATCHFINDER_REQUIRED_NBYTES)) {
                                        in_next++;
                                        cache_ptr->length = 0;
                                        cache_ptr++;
@@ -1819,7 +1828,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                                                 max_len,
                                                 nice_len,
                                                 c->max_search_depth,
-                                                &next_hash,
+                                                next_hashes,
                                                 &best_len,
                                                 cache_ptr + 1);
                        in_next++;
@@ -1844,7 +1853,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                                        if (unlikely(max_len > in_end - in_next)) {
                                                max_len = in_end - in_next;
                                                nice_len = min(max_len, nice_len);
-                                               if (unlikely(max_len < 5)) {
+                                               if (unlikely(max_len < BT_MATCHFINDER_REQUIRED_NBYTES)) {
                                                        in_next++;
                                                        cache_ptr->length = 0;
                                                        cache_ptr++;
@@ -1857,7 +1866,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                                                   max_len,
                                                   nice_len,
                                                   c->max_search_depth,
-                                                  &next_hash);
+                                                  next_hashes);
                                        in_next++;
                                        cache_ptr->length = 0;
                                        cache_ptr++;
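
The four hunks in lzx_compress_near_optimal() reflect one matchfinder interface change: the binary-tree matchfinder now carries two rolling hash values (the next_hashes[2] array handed to both matchfinder calls, presumably one hash per hash table), and the minimum lookahead it needs is taken from BT_MATCHFINDER_REQUIRED_NBYTES rather than the hard-coded 5. A caller-side sketch of the guard (the constant's value and the two-hash-table detail are assumptions; the authoritative definitions live in bt_matchfinder.h):

	u32 next_hashes[2] = {0, 0};   /* assumed: one precomputed hash per table */

	if (unlikely(max_len < BT_MATCHFINDER_REQUIRED_NBYTES)) {
		/* Too close to the end of the buffer to compute the matchfinder's
		 * hashes: record an empty match list for this position and move on
		 * without touching the matchfinder, as the old "< 5" check did.  */
		in_next++;
		cache_ptr->length = 0;
		cache_ptr++;
		continue;
	}
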
@@ -2258,8 +2267,8 @@ lzx_create_compressor(size_t max_bufsize, unsigned compression_level,
                        c->impl = lzx_compress_lazy_16;
                else
                        c->impl = lzx_compress_lazy_32;
-               c->max_search_depth = (36 * compression_level) / 20;
-               c->nice_match_length = (72 * compression_level) / 20;
+               c->max_search_depth = (60 * compression_level) / 20;
+               c->nice_match_length = (80 * compression_level) / 20;
 
                /* lzx_compress_lazy() needs max_search_depth >= 2 because it
                 * halves the max_search_depth when attempting a lazy match, and
@@ -2278,7 +2287,7 @@ lzx_create_compressor(size_t max_bufsize, unsigned compression_level,
                /* Scale nice_match_length and max_search_depth with the
                 * compression level.  */
                c->max_search_depth = (24 * compression_level) / 50;
-               c->nice_match_length = (32 * compression_level) / 50;
+               c->nice_match_length = (48 * compression_level) / 50;
 
                /* Set a number of optimization passes appropriate for the
                 * compression level.  */
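
The two tuning hunks only raise the scaled parameters; the scaling scheme itself is unchanged. Worked values at the reference levels implied by the divisors (20 for the lazy path, 50 for the near-optimal path), computed directly from the lines above:

	/* lazy compressor, compression_level = 20:
	 *     max_search_depth:   (36 * 20) / 20 = 36  ->  (60 * 20) / 20 = 60
	 *     nice_match_length:  (72 * 20) / 20 = 72  ->  (80 * 20) / 20 = 80
	 *
	 * near-optimal compressor, compression_level = 50:
	 *     max_search_depth:   (24 * 50) / 50 = 24        (unchanged)
	 *     nice_match_length:  (32 * 50) / 50 = 32  ->  (48 * 50) / 50 = 48
	 */
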
@@ -2339,7 +2348,7 @@ lzx_compress(const void *restrict in, size_t in_nbytes,
        else
                memcpy(c->in_buffer, in, in_nbytes);
        c->in_nbytes = in_nbytes;
-       lzx_do_e8_preprocessing(c->in_buffer, in_nbytes);
+       lzx_preprocess(c->in_buffer, in_nbytes);
 
        /* Initially, the previous Huffman codeword lengths are all zeroes.  */
        c->codes_index = 0;
@@ -2354,7 +2363,7 @@ lzx_compress(const void *restrict in, size_t in_nbytes,
        /* Flush the output bitstream and return the compressed size or 0.  */
        result = lzx_flush_output(&os);
        if (!result && c->destructive)
-               lzx_undo_e8_preprocessing(c->in_buffer, c->in_nbytes);
+               lzx_postprocess(c->in_buffer, c->in_nbytes);
        return result;
 }
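
The renamed lzx_preprocess()/lzx_postprocess() pair is LZX's x86 "E8" call translation: before compression, the 32-bit relative displacement following each 0xE8 opcode is rewritten to an absolute target, so that repeated calls to the same function compress to identical byte patterns, and the transform is undone here when a destructive compression produced no output. A deliberately simplified sketch of the forward pass, not wimlib's exact rules (which also bound the translation by position and a file-size parameter); get_unaligned_le32/put_unaligned_le32 are assumed helpers:

	/* Simplified illustration only; real LZX E8 translation has additional
	 * range and end-of-buffer rules that are omitted here.  */
	static void
	e8_preprocess_sketch(u8 *buf, size_t size)
	{
		for (size_t i = 0; i + 5 <= size; ) {
			if (buf[i] == 0xE8) {
				s32 rel = get_unaligned_le32(&buf[i + 1]);
				s32 abs = rel + (s32)i;            /* relative -> absolute  */
				put_unaligned_le32(abs, &buf[i + 1]);
				i += 5;                            /* skip opcode + operand */
			} else {
				i++;
			}
		}
	}
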