bt_matchfinder: use 4-byte hashing for trees
diff --git a/src/lzx_compress.c b/src/lzx_compress.c
index de8430145f4e7a2867ad60c59b5d17a6b5a4436e..36e27dbed6c2ecdda17b4206e3d8a75c0b299ceb 100644
--- a/src/lzx_compress.c
+++ b/src/lzx_compress.c
 #define LZX_BIT_COST           16
 
 /*
- * Consideration of aligned offset costs is disabled for now, due to
- * insufficient benefit gained from the time spent.
+ * Should the compressor take into account the costs of aligned offset symbols?
  */
-#define LZX_CONSIDER_ALIGNED_COSTS     0
+#define LZX_CONSIDER_ALIGNED_COSTS     1
 
 /*
  * LZX_MAX_FAST_LEVEL is the maximum compression level at which we use the
 #include "wimlib/util.h"
 
 /* Matchfinders with 16-bit positions  */
-#define pos_t  u16
-#define MF_SUFFIX _16
+#define mf_pos_t       u16
+#define MF_SUFFIX      _16
 #include "wimlib/bt_matchfinder.h"
 #include "wimlib/hc_matchfinder.h"
 
 /* Matchfinders with 32-bit positions  */
-#undef pos_t
+#undef mf_pos_t
 #undef MF_SUFFIX
-#define pos_t  u32
-#define MF_SUFFIX _32
+#define mf_pos_t       u32
+#define MF_SUFFIX      _32
 #include "wimlib/bt_matchfinder.h"
 #include "wimlib/hc_matchfinder.h"
 
@@ -1557,16 +1556,18 @@ lzx_find_min_cost_path(struct lzx_compressor * const restrict c,
                                u32 offset_data = offset + LZX_OFFSET_ADJUSTMENT;
                                unsigned offset_slot = lzx_comp_get_offset_slot(c, offset_data,
                                                                                is_16_bit);
+                               u32 base_cost = cur_node->cost;
+
+                       #if LZX_CONSIDER_ALIGNED_COSTS
+                               if (offset_data >= 16)
+                                       base_cost += c->costs.aligned[offset_data &
+                                                                     LZX_ALIGNED_OFFSET_BITMASK];
+                       #endif
+
                                do {
-                                       u32 cost = cur_node->cost +
+                                       u32 cost = base_cost +
                                                   c->costs.match_cost[offset_slot][
                                                                next_len - LZX_MIN_MATCH_LEN];
-                               #if LZX_CONSIDER_ALIGNED_COSTS
-                                       if (lzx_extra_offset_bits[offset_slot] >=
-                                           LZX_NUM_ALIGNED_OFFSET_BITS)
-                                               cost += c->costs.aligned[offset_data &
-                                                                        LZX_ALIGNED_OFFSET_BITMASK];
-                               #endif
                                        if (cost < (cur_node + next_len)->cost) {
                                                (cur_node + next_len)->cost = cost;
                                                (cur_node + next_len)->item =
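The rewrite above is hand-done loop-invariant code motion: the aligned-offset cost depends only on the offset (its low LZX_NUM_ALIGNED_OFFSET_BITS bits), not on the match length, so it can be folded into base_cost once rather than re-tested and re-added on every iteration of the length loop. A self-contained sketch of the resulting cost computation, with a hypothetical helper and illustrative parameters:

#include <stdint.h>

/*
 * Illustrative helper, not wimlib code: the aligned-offset term depends only
 * on the offset, so the hunk above hoists it into base_cost outside the
 * per-length loop.
 */
static uint32_t
match_path_cost(uint32_t node_cost, uint32_t offset_data,
		const uint32_t aligned_costs[8],  /* cost per low-3-bit value */
		const uint32_t *len_costs,        /* cost per (len - min_len) */
		unsigned len_idx)
{
	uint32_t base_cost = node_cost;

	/* offset_data >= 16 means the offset slot has at least 3 extra bits,
	 * and in aligned offset blocks the low 3 of those bits are coded with
	 * the aligned offset code, so their cost varies per symbol. */
	if (offset_data >= 16)
		base_cost += aligned_costs[offset_data & 7];

	return base_cost + len_costs[len_idx];
}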
@@ -1635,7 +1636,7 @@ lzx_compute_match_costs(struct lzx_compressor *c)
                unsigned i;
 
        #if LZX_CONSIDER_ALIGNED_COSTS
-               if (lzx_extra_offset_bits[offset_slot] >= LZX_NUM_ALIGNED_OFFSET_BITS)
+               if (offset_slot >= 8)
                        extra_cost -= LZX_NUM_ALIGNED_OFFSET_BITS * LZX_BIT_COST;
        #endif
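The condition in lzx_compute_match_costs() changes from a table lookup to offset_slot >= 8. The two are equivalent because the LZX extra-offset-bits sequence is 0,0,0,0,1,1,2,2,3,3,..., so slot 8 is the first slot with at least LZX_NUM_ALIGNED_OFFSET_BITS (3) extra bits, and its base value is 16, which also matches the offset_data >= 16 test added in the previous hunk. A throwaway check of that equivalence, assuming the standard LZX tables and ignoring the cap of 17 extra bits that only affects much larger slots:

#include <assert.h>

/* Illustrative check, not wimlib code.  The LZX extra-offset-bits pattern is
 * max(0, slot/2 - 1), capped at 17 only for very large slots. */
static unsigned
extra_offset_bits_for_slot(unsigned slot)
{
	return (slot < 2) ? 0 : (slot / 2) - 1;
}

static void
check_aligned_threshold(void)
{
	for (unsigned slot = 0; slot < 30; slot++)
		assert((extra_offset_bits_for_slot(slot) >= 3) == (slot >= 8));
}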
 
@@ -1778,9 +1779,9 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
        const u8 * const in_begin = c->in_buffer;
        const u8 *       in_next = in_begin;
        const u8 * const in_end  = in_begin + c->in_nbytes;
-       unsigned max_len = LZX_MAX_MATCH_LEN;
-       unsigned nice_len = min(c->nice_match_length, max_len);
-       u32 next_hash = 0;
+       u32 max_len = LZX_MAX_MATCH_LEN;
+       u32 nice_len = min(c->nice_match_length, max_len);
+       u32 next_hashes[2] = {};
        struct lzx_lru_queue queue;
 
        CALL_BT_MF(is_16_bit, c, bt_matchfinder_init);
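The single next_hash becomes next_hashes[2] because the binary-tree matchfinder now hashes 4 bytes for its tree, alongside the shorter hash used for length-3 matches, and it keeps both hashes of the following position precomputed. A sketch of the kind of multiplicative hashing this involves; the constant and helper names here are illustrative, and the real hash functions live in the matchfinder headers:

#include <stdint.h>
#include <string.h>

static inline uint32_t
load_u32(const uint8_t *p)
{
	uint32_t v;

	memcpy(&v, p, sizeof(v));
	return v;		/* assume little-endian for this sketch */
}

static inline uint32_t
lz_hash32(uint32_t seq, unsigned num_bits)
{
	/* Fibonacci-style hashing: multiply, keep the top num_bits bits. */
	return (uint32_t)(seq * 0x9E3779B1u) >> (32 - num_bits);
}

/*
 * Precompute the two hashes for position p, as next_hashes[] caches one
 * position ahead: one table indexed by 3 bytes (for length-3 matches),
 * the other by 4 bytes (for the binary tree).
 */
static inline void
compute_hashes(const uint8_t *p, unsigned bits3, unsigned bits4,
	       uint32_t hashes[2])
{
	uint32_t seq = load_u32(p);

	hashes[0] = lz_hash32(seq & 0xFFFFFF, bits3);	/* low 3 bytes */
	hashes[1] = lz_hash32(seq, bits4);		/* all 4 bytes */
}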
@@ -1796,20 +1797,14 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                struct lz_match *cache_ptr = c->match_cache;
                do {
                        struct lz_match *lz_matchptr;
-                       unsigned best_len;
+                       u32 best_len;
 
                        /* If approaching the end of the input buffer, adjust
                         * 'max_len' and 'nice_len' accordingly.  */
                        if (unlikely(max_len > in_end - in_next)) {
                                max_len = in_end - in_next;
                                nice_len = min(max_len, nice_len);
-
-                               /* This extra check is needed to ensure that we
-                                * never output a length 2 match of the very
-                                * last two bytes with the very first two bytes,
-                                * since such a match has an offset too large to
-                                * be represented.  */
-                               if (unlikely(max_len < 3)) {
+                               if (unlikely(max_len < 5)) {
                                        in_next++;
                                        cache_ptr->length = 0;
                                        cache_ptr++;
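The end-of-buffer cutoff rises from 3 to 5: as this diff reads, the matchfinder now loads a 4-byte sequence and also prehashes the position after the current one, so the 4 bytes starting at in_next + 1 must exist, meaning at least 5 bytes must remain. With fewer, the position is simply skipped and an empty match-cache entry recorded, exactly as before. Stated as a hypothetical helper:

/*
 * Hypothetical helper, not in wimlib: minimum bytes that must remain at a
 * position for the 4-byte matchfinder to run there.  The "+ 1" accounts for
 * prehashing the next position (an inference from this diff, not a comment
 * in the source).
 */
static inline unsigned
min_remaining_for_matchfinding(unsigned hash_nbytes)
{
	return hash_nbytes + 1;		/* 4-byte hashing -> 5 bytes */
}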
@@ -1824,7 +1819,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                                                 max_len,
                                                 nice_len,
                                                 c->max_search_depth,
-                                                &next_hash,
+                                                next_hashes,
                                                 &best_len,
                                                 cache_ptr + 1);
                        in_next++;
@@ -1849,7 +1844,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                                        if (unlikely(max_len > in_end - in_next)) {
                                                max_len = in_end - in_next;
                                                nice_len = min(max_len, nice_len);
-                                               if (unlikely(max_len < 3)) {
+                                               if (unlikely(max_len < 5)) {
                                                        in_next++;
                                                        cache_ptr->length = 0;
                                                        cache_ptr++;
@@ -1862,7 +1857,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                                                   max_len,
                                                   nice_len,
                                                   c->max_search_depth,
-                                                  &next_hash);
+                                                  next_hashes);
                                        in_next++;
                                        cache_ptr->length = 0;
                                        cache_ptr++;