Allow hc_matchfinder and bt_matchfinder to be "templated"
author     Eric Biggers <ebiggers3@gmail.com>
Sat, 19 Sep 2015 18:56:08 +0000 (13:56 -0500)
committer  Eric Biggers <ebiggers3@gmail.com>
Sun, 27 Sep 2015 14:41:13 +0000 (09:41 -0500)
include/wimlib/bt_matchfinder.h
include/wimlib/compiler.h
include/wimlib/hc_matchfinder.h
src/lzx_compress.c
src/xpress_compress.c

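For context before the per-file diffs: the matchfinder headers become pseudo-templates by routing every exported type and function name through a TEMPLATED() macro, and a .c file instantiates a flavor by defining pos_t and MF_SUFFIX and then including the header (lzx_compress.c below does this twice, xpress_compress.c once). The following standalone sketch reproduces the mechanism with hypothetical mini_mf names; it is an illustration only, not wimlib code.

/* sketch.c - illustration of the TEMPLATED()/MF_SUFFIX pattern; in wimlib the
 * two "instantiations" are produced by re-including a matchfinder header. */
#include <stdint.h>
#include <stdio.h>

#define CONCAT_IMPL(s1, s2)	s1##s2
#define CONCAT(s1, s2)		CONCAT_IMPL(s1, s2)

/* 16-bit flavor */
#define pos_t		uint16_t
#define MF_SUFFIX	_16
#define TEMPLATED(name)	CONCAT(name, MF_SUFFIX)

struct TEMPLATED(mini_mf) { pos_t tab[4]; };	/* struct mini_mf_16 */
static size_t TEMPLATED(mini_mf_size)(void)	/* mini_mf_size_16() */
{
	return sizeof(struct TEMPLATED(mini_mf));
}

/* 32-bit flavor */
#undef pos_t
#undef MF_SUFFIX
#undef TEMPLATED
#define pos_t		uint32_t
#define MF_SUFFIX	_32
#define TEMPLATED(name)	CONCAT(name, MF_SUFFIX)

struct TEMPLATED(mini_mf) { pos_t tab[4]; };	/* struct mini_mf_32 */
static size_t TEMPLATED(mini_mf_size)(void)	/* mini_mf_size_32() */
{
	return sizeof(struct TEMPLATED(mini_mf));
}

int main(void)
{
	printf("%zu %zu\n", mini_mf_size_16(), mini_mf_size_32());	/* prints "8 16" */
	return 0;
}
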
diff --git a/include/wimlib/bt_matchfinder.h b/include/wimlib/bt_matchfinder.h
index 189c5a1571037cffe7f7ecead61a02275b9a66e6..459fedd368be9bb44704f6dd40e952da7500cc1d 100644
  * ----------------------------------------------------------------------------
  */
 
-#ifndef _BT_MATCHFINDER_H
-#define _BT_MATCHFINDER_H
-
-#ifndef MATCHFINDER_MAX_WINDOW_ORDER
-#  error "MATCHFINDER_MAX_WINDOW_ORDER must be defined!"
-#endif
 
 #include <string.h>
 
 #include "wimlib/lz_extend.h"
 #include "wimlib/lz_hash.h"
 
-#if MATCHFINDER_MAX_WINDOW_ORDER < 13
-#  define BT_MATCHFINDER_HASH_ORDER 14
-#elif MATCHFINDER_MAX_WINDOW_ORDER < 15
-#  define BT_MATCHFINDER_HASH_ORDER 15
-#else
-#  define BT_MATCHFINDER_HASH_ORDER 16
-#endif
+#define BT_MATCHFINDER_HASH_ORDER 16
+
+/* TEMPLATED functions and structures have MF_SUFFIX appended to their name.  */
+#undef TEMPLATED
+#define TEMPLATED(name)                CONCAT(name, MF_SUFFIX)
+
+#ifndef _WIMLIB_BT_MATCHFINDER_H
+#define _WIMLIB_BT_MATCHFINDER_H
 
-#if MATCHFINDER_MAX_WINDOW_ORDER <= 16
-typedef u16 pos_t;
-#else
-typedef u32 pos_t;
-#endif
+/* Non-templated definitions  */
 
 /* Representation of a match found by the bt_matchfinder  */
 struct lz_match {
 
        /* The number of bytes matched.  */
-       pos_t length;
+       u32 length;
 
        /* The offset back from the current position that was matched.  */
-       pos_t offset;
+       u32 offset;
 };
 
-struct bt_matchfinder {
+static inline u32
+bt_matchfinder_hash_3_bytes(const u8 *in_next)
+{
+       return lz_hash_3_bytes(in_next, BT_MATCHFINDER_HASH_ORDER);
+}
+
+#endif /* _WIMLIB_BT_MATCHFINDER_H */
+
+struct TEMPLATED(bt_matchfinder) {
        pos_t hash_tab[1UL << BT_MATCHFINDER_HASH_ORDER];
        pos_t child_tab[];
 };
@@ -89,45 +88,35 @@ struct bt_matchfinder {
 /* Return the number of bytes that must be allocated for a 'bt_matchfinder' that
  * can work with buffers up to the specified size.  */
 static inline size_t
-bt_matchfinder_size(size_t max_bufsize)
+TEMPLATED(bt_matchfinder_size)(size_t max_bufsize)
 {
-       return sizeof(struct bt_matchfinder) + (2 * max_bufsize * sizeof(pos_t));
+       return sizeof(struct TEMPLATED(bt_matchfinder)) +
+               (2 * max_bufsize * sizeof(pos_t));
 }
 
 /* Prepare the matchfinder for a new input buffer.  */
 static inline void
-bt_matchfinder_init(struct bt_matchfinder *mf)
+TEMPLATED(bt_matchfinder_init)(struct TEMPLATED(bt_matchfinder) *mf)
 {
        memset(mf, 0, sizeof(*mf));
 }
 
-static inline u32
-bt_matchfinder_hash_3_bytes(const u8 *in_next)
-{
-       return lz_hash_3_bytes(in_next, BT_MATCHFINDER_HASH_ORDER);
-}
-
 static inline pos_t *
-bt_child(struct bt_matchfinder *mf, pos_t node, int offset)
+TEMPLATED(bt_child)(struct TEMPLATED(bt_matchfinder) *mf, pos_t node, int offset)
 {
-       if (MATCHFINDER_MAX_WINDOW_ORDER < sizeof(pos_t) * 8) {
-               /* no cast needed */
-               return &mf->child_tab[(node << 1) + offset];
-       } else {
-               return &mf->child_tab[((size_t)node << 1) + offset];
-       }
+       return &mf->child_tab[(node << 1) + offset];
 }
 
 static inline pos_t *
-bt_left_child(struct bt_matchfinder *mf, pos_t node)
+TEMPLATED(bt_left_child)(struct TEMPLATED(bt_matchfinder) *mf, pos_t node)
 {
-       return bt_child(mf, node, 0);
+       return TEMPLATED(bt_child)(mf, node, 0);
 }
 
 static inline pos_t *
-bt_right_child(struct bt_matchfinder *mf, pos_t node)
+TEMPLATED(bt_right_child)(struct TEMPLATED(bt_matchfinder) *mf, pos_t node)
 {
-       return bt_child(mf, node, 1);
+       return TEMPLATED(bt_child)(mf, node, 1);
 }
 
 /*
@@ -168,16 +157,16 @@ bt_right_child(struct bt_matchfinder *mf, pos_t node)
  * array.  (If no matches were found, this will be the same as @lz_matchptr.)
  */
 static inline struct lz_match *
-bt_matchfinder_get_matches(struct bt_matchfinder * const restrict mf,
-                          const u8 * const in_begin,
-                          const u8 * const in_next,
-                          const unsigned min_len,
-                          const unsigned max_len,
-                          const unsigned nice_len,
-                          const unsigned max_search_depth,
-                          u32 * restrict next_hash,
-                          unsigned * restrict best_len_ret,
-                          struct lz_match * restrict lz_matchptr)
+TEMPLATED(bt_matchfinder_get_matches)(struct TEMPLATED(bt_matchfinder) * const restrict mf,
+                                     const u8 * const in_begin,
+                                     const u8 * const in_next,
+                                     const unsigned min_len,
+                                     const unsigned max_len,
+                                     const unsigned nice_len,
+                                     const unsigned max_search_depth,
+                                     u32 * restrict next_hash,
+                                     unsigned * restrict best_len_ret,
+                                     struct lz_match * restrict lz_matchptr)
 {
        unsigned depth_remaining = max_search_depth;
        u32 hash;
@@ -199,8 +188,8 @@ bt_matchfinder_get_matches(struct bt_matchfinder * const restrict mf,
        mf->hash_tab[hash] = in_next - in_begin;
        prefetchw(&mf->hash_tab[*next_hash]);
 
-       pending_lt_ptr = bt_left_child(mf, in_next - in_begin);
-       pending_gt_ptr = bt_right_child(mf, in_next - in_begin);
+       pending_lt_ptr = TEMPLATED(bt_left_child)(mf, in_next - in_begin);
+       pending_gt_ptr = TEMPLATED(bt_right_child)(mf, in_next - in_begin);
        best_lt_len = 0;
        best_gt_len = 0;
        len = 0;
@@ -223,8 +212,8 @@ bt_matchfinder_get_matches(struct bt_matchfinder * const restrict mf,
                                lz_matchptr->offset = in_next - matchptr;
                                lz_matchptr++;
                                if (len >= nice_len) {
-                                       *pending_lt_ptr = *bt_left_child(mf, cur_node);
-                                       *pending_gt_ptr = *bt_right_child(mf, cur_node);
+                                       *pending_lt_ptr = *TEMPLATED(bt_left_child)(mf, cur_node);
+                                       *pending_gt_ptr = *TEMPLATED(bt_right_child)(mf, cur_node);
                                        *best_len_ret = best_len;
                                        return lz_matchptr;
                                }
@@ -233,14 +222,14 @@ bt_matchfinder_get_matches(struct bt_matchfinder * const restrict mf,
 
                if (matchptr[len] < in_next[len]) {
                        *pending_lt_ptr = cur_node;
-                       pending_lt_ptr = bt_right_child(mf, cur_node);
+                       pending_lt_ptr = TEMPLATED(bt_right_child)(mf, cur_node);
                        cur_node = *pending_lt_ptr;
                        best_lt_len = len;
                        if (best_gt_len < len)
                                len = best_gt_len;
                } else {
                        *pending_gt_ptr = cur_node;
-                       pending_gt_ptr = bt_left_child(mf, cur_node);
+                       pending_gt_ptr = TEMPLATED(bt_left_child)(mf, cur_node);
                        cur_node = *pending_gt_ptr;
                        best_gt_len = len;
                        if (best_lt_len < len)
@@ -281,13 +270,13 @@ bt_matchfinder_get_matches(struct bt_matchfinder * const restrict mf,
  * actually record any matches.
  */
 static inline void
-bt_matchfinder_skip_position(struct bt_matchfinder * const restrict mf,
-                            const u8 * const in_begin,
-                            const u8 * const in_next,
-                            const u8 * const in_end,
-                            const unsigned nice_len,
-                            const unsigned max_search_depth,
-                            u32 * restrict next_hash)
+TEMPLATED(bt_matchfinder_skip_position)(struct TEMPLATED(bt_matchfinder) * const restrict mf,
+                                       const u8 * const in_begin,
+                                       const u8 * const in_next,
+                                       const u8 * const in_end,
+                                       const unsigned nice_len,
+                                       const unsigned max_search_depth,
+                                       u32 * restrict next_hash)
 {
        unsigned depth_remaining = max_search_depth;
        u32 hash;
@@ -307,8 +296,8 @@ bt_matchfinder_skip_position(struct bt_matchfinder * const restrict mf,
        prefetchw(&mf->hash_tab[*next_hash]);
 
        depth_remaining = max_search_depth;
-       pending_lt_ptr = bt_left_child(mf, in_next - in_begin);
-       pending_gt_ptr = bt_right_child(mf, in_next - in_begin);
+       pending_lt_ptr = TEMPLATED(bt_left_child)(mf, in_next - in_begin);
+       pending_gt_ptr = TEMPLATED(bt_right_child)(mf, in_next - in_begin);
        best_lt_len = 0;
        best_gt_len = 0;
        len = 0;
@@ -325,22 +314,22 @@ bt_matchfinder_skip_position(struct bt_matchfinder * const restrict mf,
                if (matchptr[len] == in_next[len]) {
                        len = lz_extend(in_next, matchptr, len + 1, nice_len);
                        if (len == nice_len) {
-                               *pending_lt_ptr = *bt_left_child(mf, cur_node);
-                               *pending_gt_ptr = *bt_right_child(mf, cur_node);
+                               *pending_lt_ptr = *TEMPLATED(bt_left_child)(mf, cur_node);
+                               *pending_gt_ptr = *TEMPLATED(bt_right_child)(mf, cur_node);
                                return;
                        }
                }
 
                if (matchptr[len] < in_next[len]) {
                        *pending_lt_ptr = cur_node;
-                       pending_lt_ptr = bt_right_child(mf, cur_node);
+                       pending_lt_ptr = TEMPLATED(bt_right_child)(mf, cur_node);
                        cur_node = *pending_lt_ptr;
                        best_lt_len = len;
                        if (best_gt_len < len)
                                len = best_gt_len;
                } else {
                        *pending_gt_ptr = cur_node;
-                       pending_gt_ptr = bt_left_child(mf, cur_node);
+                       pending_gt_ptr = TEMPLATED(bt_left_child)(mf, cur_node);
                        cur_node = *pending_gt_ptr;
                        best_gt_len = len;
                        if (best_lt_len < len)
@@ -354,5 +343,3 @@ bt_matchfinder_skip_position(struct bt_matchfinder * const restrict mf,
                }
        }
 }
-
-#endif /* _BT_MATCHFINDER_H */
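
As a quick worked check of what the pos_t switch buys (my arithmetic from the definitions above, not text from the commit): with BT_MATCHFINDER_HASH_ORDER now fixed at 16, a buffer limit of 32768 bytes gives bt_matchfinder_size_16(32768) = (1 << 16) * 2 + 2 * 32768 * 2 bytes = 256 KiB, while bt_matchfinder_size_32(32768) is exactly twice that, 512 KiB; halving the tables is the cache-pressure saving that the 16-bit instantiation exists for.
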
diff --git a/include/wimlib/compiler.h b/include/wimlib/compiler.h
index 1ea66968b10b6cbad675bcb55d55e823f95088a8..222f889bb7c308a7c37fe93e0bdc6e9b92ecaec3 100644
 #  define STATIC_ASSERT(expr)  ((void)sizeof(char[1 - 2 * !(expr)]))
 #endif
 
+#define CONCAT_IMPL(s1, s2)    s1##s2
+
+/* CONCAT() - concatenate two tokens at preprocessing time.  */
+#define CONCAT(s1, s2)         CONCAT_IMPL(s1, s2)
+
 #endif /* _WIMLIB_COMPILER_H */
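
A side note on why compiler.h gains two macros rather than one: the '##' operator pastes its operands without macro-expanding them first, so the CONCAT()/CONCAT_IMPL() indirection is what lets MF_SUFFIX expand to _16 or _32 before the paste happens. A hypothetical comparison (illustration only, not wimlib code):

#define SUFFIX			_16
#define PASTE_DIRECT(a, b)	a##b
#define PASTE_EXPANDED(a, b)	PASTE_DIRECT(a, b)

/* PASTE_DIRECT(name, SUFFIX)   expands to nameSUFFIX  (SUFFIX is not expanded) */
/* PASTE_EXPANDED(name, SUFFIX) expands to name_16     (SUFFIX expands first)   */
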
diff --git a/include/wimlib/hc_matchfinder.h b/include/wimlib/hc_matchfinder.h
index 8382a3cdab8388d4710d5189acd4a2d74bc953f1..6c7020e5979a58b59cf56a1e6ebd5c3a3f7d54b9 100644
  *
  *                             Notes on usage
  *
- * You must define MATCHFINDER_MAX_WINDOW_ORDER before including this header
- * because that determines which integer type to use for positions.  Since
- * 16-bit integers are faster than 32-bit integers due to reduced memory usage
- * (and therefore reduced cache pressure), the code only uses 32-bit integers if
- * they are needed to represent all possible positions.
+ * Before including this header, you must define 'pos_t' to an integer type that
+ * can represent all possible positions.  This can be a 16-bit or 32-bit
+ * unsigned integer.  When possible, the former should be used due to the
+ * reduced cache pressure.  This header can be included multiple times in a
+ * single .c file with different 'pos_t' definitions; however, you must define a
+ * different MF_SUFFIX each time to generate different names for the matchfinder
+ * structure and functions.
  *
  * The number of bytes that must be allocated for a given 'struct
  * hc_matchfinder' must be gotten by calling hc_matchfinder_size().
  * ----------------------------------------------------------------------------
  */
 
-#ifndef _HC_MATCHFINDER_H
-#define _HC_MATCHFINDER_H
-
-#ifndef MATCHFINDER_MAX_WINDOW_ORDER
-#  error "MATCHFINDER_MAX_WINDOW_ORDER must be defined!"
-#endif
-
 #include <string.h>
 
 #include "wimlib/lz_extend.h"
 #include "wimlib/lz_hash.h"
 #include "wimlib/unaligned.h"
 
-#if MATCHFINDER_MAX_WINDOW_ORDER <= 16
-typedef u16 pos_t;
-#else
-typedef u32 pos_t;
-#endif
-
 #define HC_MATCHFINDER_HASH3_ORDER     14
 #define HC_MATCHFINDER_HASH4_ORDER     15
 
-struct hc_matchfinder {
+/* TEMPLATED functions and structures have MF_SUFFIX appended to their name.  */
+#undef TEMPLATED
+#define TEMPLATED(name)                CONCAT(name, MF_SUFFIX)
+
+struct TEMPLATED(hc_matchfinder) {
 
        /* The hash table for finding length 3 matches  */
        pos_t hash3_tab[1UL << HC_MATCHFINDER_HASH3_ORDER];
@@ -139,14 +132,15 @@ struct hc_matchfinder {
 /* Return the number of bytes that must be allocated for a 'hc_matchfinder' that
  * can work with buffers up to the specified size.  */
 static inline size_t
-hc_matchfinder_size(size_t max_bufsize)
+TEMPLATED(hc_matchfinder_size)(size_t max_bufsize)
 {
-       return sizeof(struct hc_matchfinder) + (max_bufsize * sizeof(pos_t));
+       return sizeof(struct TEMPLATED(hc_matchfinder)) +
+               (max_bufsize * sizeof(pos_t));
 }
 
 /* Prepare the matchfinder for a new input buffer.  */
 static inline void
-hc_matchfinder_init(struct hc_matchfinder *mf)
+TEMPLATED(hc_matchfinder_init)(struct TEMPLATED(hc_matchfinder) *mf)
 {
        memset(mf, 0, sizeof(*mf));
 }
@@ -181,15 +175,15 @@ hc_matchfinder_init(struct hc_matchfinder *mf)
  * 'best_len' was found.
  */
 static inline u32
-hc_matchfinder_longest_match(struct hc_matchfinder * const restrict mf,
-                            const u8 * const restrict in_begin,
-                            const ptrdiff_t cur_pos,
-                            u32 best_len,
-                            const u32 max_len,
-                            const u32 nice_len,
-                            const u32 max_search_depth,
-                            u32 next_hashes[const restrict static 2],
-                            u32 * const restrict offset_ret)
+TEMPLATED(hc_matchfinder_longest_match)(struct TEMPLATED(hc_matchfinder) * const restrict mf,
+                                       const u8 * const restrict in_begin,
+                                       const ptrdiff_t cur_pos,
+                                       u32 best_len,
+                                       const u32 max_len,
+                                       const u32 nice_len,
+                                       const u32 max_search_depth,
+                                       u32 next_hashes[const restrict static 2],
+                                       u32 * const restrict offset_ret)
 {
        const u8 *in_next = in_begin + cur_pos;
        u32 depth_remaining = max_search_depth;
@@ -350,12 +344,12 @@ out:
  * Returns @in_next + @count.
  */
 static inline const u8 *
-hc_matchfinder_skip_positions(struct hc_matchfinder * const restrict mf,
-                             const u8 * const restrict in_begin,
-                             const ptrdiff_t cur_pos,
-                             const ptrdiff_t end_pos,
-                             const u32 count,
-                             u32 next_hashes[const restrict static 2])
+TEMPLATED(hc_matchfinder_skip_positions)(struct TEMPLATED(hc_matchfinder) * const restrict mf,
+                                        const u8 * const restrict in_begin,
+                                        const ptrdiff_t cur_pos,
+                                        const ptrdiff_t end_pos,
+                                        const u32 count,
+                                        u32 next_hashes[const restrict static 2])
 {
        const u8 *in_next = in_begin + cur_pos;
        const u8 * const stop_ptr = in_next + count;
@@ -386,5 +380,3 @@ hc_matchfinder_skip_positions(struct hc_matchfinder * const restrict mf,
 
        return stop_ptr;
 }
-
-#endif /* _HC_MATCHFINDER_H */
diff --git a/src/lzx_compress.c b/src/lzx_compress.c
index b421c65ccd2808eb760078ae84084132140b48a0..ac65c5dcaf15f7aa01a8803dbb578472524da08f 100644
 #define ALIGNED_CODEWORD_LIMIT 7
 #define PRE_CODEWORD_LIMIT     7
 
-#include "wimlib/lzx_common.h"
-
-/*
- * The maximum allowed window order for the matchfinder.
- */
-#define MATCHFINDER_MAX_WINDOW_ORDER   LZX_MAX_WINDOW_ORDER
-
-#include <string.h>
-
-#include "wimlib/bt_matchfinder.h"
 #include "wimlib/compress_common.h"
 #include "wimlib/compressor_ops.h"
 #include "wimlib/error.h"
-#include "wimlib/hc_matchfinder.h"
 #include "wimlib/lz_extend.h"
+#include "wimlib/lzx_common.h"
 #include "wimlib/unaligned.h"
 #include "wimlib/util.h"
 
+/* Matchfinders with 16-bit positions  */
+#define pos_t  u16
+#define MF_SUFFIX _16
+#include "wimlib/bt_matchfinder.h"
+#include "wimlib/hc_matchfinder.h"
+
+/* Matchfinders with 32-bit positions  */
+#undef pos_t
+#undef MF_SUFFIX
+#define pos_t  u32
+#define MF_SUFFIX _32
+#include "wimlib/bt_matchfinder.h"
+#include "wimlib/hc_matchfinder.h"
+
 struct lzx_output_bitstream;
 
 /* Codewords for the LZX Huffman codes.  */
@@ -423,7 +427,10 @@ struct lzx_compressor {
                /* Data for greedy or lazy parsing  */
                struct {
                        /* Hash chains matchfinder (MUST BE LAST!!!)  */
-                       struct hc_matchfinder hc_mf;
+                       union {
+                               struct hc_matchfinder_16 hc_mf_16;
+                               struct hc_matchfinder_32 hc_mf_32;
+                       };
                };
 
                /* Data for near-optimal parsing  */
@@ -479,14 +486,45 @@ struct lzx_compressor {
                                                    LZX_MAX_MATCH_LEN - 1];
 
                        /* Hash table for finding length 2 matches  */
-                       pos_t hash2_tab[LZX_HASH2_LENGTH];
+                       u32 hash2_tab[LZX_HASH2_LENGTH];
 
                        /* Binary trees matchfinder (MUST BE LAST!!!)  */
-                       struct bt_matchfinder bt_mf;
+                       union {
+                               struct bt_matchfinder_16 bt_mf_16;
+                               struct bt_matchfinder_32 bt_mf_32;
+                       };
                };
        };
 };
 
+/*
+ * Will a matchfinder using 16-bit positions be sufficient for compressing
+ * buffers of up to the specified size?  The limit could be 65536 bytes, but we
+ * also want to optimize out the use of offset_slot_tab_2 in the 16-bit case.
+ * This requires that the limit be no more than the length of offset_slot_tab_1
+ * (currently 32768).
+ */
+static inline bool
+lzx_is_16_bit(size_t max_bufsize)
+{
+       STATIC_ASSERT(ARRAY_LEN(((struct lzx_compressor *)0)->offset_slot_tab_1) == 32768);
+       return max_bufsize <= 32768;
+}
+
+/*
+ * The following macros call either the 16-bit or the 32-bit version of a
+ * matchfinder function based on the value of 'is_16_bit', which will be known
+ * at compilation time.
+ */
+
+#define CALL_HC_MF(is_16_bit, c, funcname, ...)                                      \
+       ((is_16_bit) ? CONCAT(funcname, _16)(&(c)->hc_mf_16, ##__VA_ARGS__) : \
+                      CONCAT(funcname, _32)(&(c)->hc_mf_32, ##__VA_ARGS__));
+
+#define CALL_BT_MF(is_16_bit, c, funcname, ...)                                      \
+       ((is_16_bit) ? CONCAT(funcname, _16)(&(c)->bt_mf_16, ##__VA_ARGS__) : \
+                      CONCAT(funcname, _32)(&(c)->bt_mf_32, ##__VA_ARGS__));
+
 /*
  * Structure to keep track of the current state of sending bits to the
  * compressed output buffer.
@@ -1075,9 +1113,10 @@ lzx_choose_verbatim_or_aligned(const struct lzx_freqs * freqs,
  * compressor's acceleration tables to speed up the mapping.
  */
 static inline unsigned
-lzx_comp_get_offset_slot(struct lzx_compressor *c, u32 adjusted_offset)
+lzx_comp_get_offset_slot(struct lzx_compressor *c, u32 adjusted_offset,
+                        bool is_16_bit)
 {
-       if (adjusted_offset < ARRAY_LEN(c->offset_slot_tab_1))
+       if (is_16_bit || adjusted_offset < ARRAY_LEN(c->offset_slot_tab_1))
                return c->offset_slot_tab_1[adjusted_offset];
        return c->offset_slot_tab_2[adjusted_offset >> 14];
 }
@@ -1126,7 +1165,7 @@ lzx_record_literal(struct lzx_compressor *c, unsigned literal, u32 *litrunlen_p)
  * offsets queue.  */
 static inline void
 lzx_record_match(struct lzx_compressor *c, unsigned length, u32 offset_data,
-                u32 recent_offsets[LZX_NUM_RECENT_OFFSETS],
+                u32 recent_offsets[LZX_NUM_RECENT_OFFSETS], bool is_16_bit,
                 u32 *litrunlen_p, struct lzx_sequence **next_seq_p)
 {
        u32 litrunlen = *litrunlen_p;
@@ -1147,7 +1186,7 @@ lzx_record_match(struct lzx_compressor *c, unsigned length, u32 offset_data,
        }
 
        /* Compute the offset slot  */
-       offset_slot = lzx_comp_get_offset_slot(c, offset_data);
+       offset_slot = lzx_comp_get_offset_slot(c, offset_data, is_16_bit);
 
        /* Compute the match header.  */
        v += offset_slot * LZX_NUM_LEN_HEADERS;
@@ -1200,8 +1239,8 @@ lzx_finish_sequence(struct lzx_sequence *last_seq, u32 litrunlen)
  * beginning of the block), but this doesn't matter because this function only
  * computes frequencies.
  */
-static void
-lzx_tally_item_list(struct lzx_compressor *c, u32 block_size)
+static inline void
+lzx_tally_item_list(struct lzx_compressor *c, u32 block_size, bool is_16_bit)
 {
        u32 node_idx = block_size;
        for (;;) {
@@ -1244,7 +1283,7 @@ lzx_tally_item_list(struct lzx_compressor *c, u32 block_size)
                }
 
                /* Tally the main symbol.  */
-               offset_slot = lzx_comp_get_offset_slot(c, offset_data);
+               offset_slot = lzx_comp_get_offset_slot(c, offset_data, is_16_bit);
                v += offset_slot * LZX_NUM_LEN_HEADERS;
                c->freqs.main[LZX_NUM_CHARS + v]++;
 
@@ -1262,8 +1301,8 @@ lzx_tally_item_list(struct lzx_compressor *c, u32 block_size)
  * first-to-last order.  The return value is the index in c->chosen_sequences at
  * which the lzx_sequences begin.
  */
-static u32
-lzx_record_item_list(struct lzx_compressor *c, u32 block_size)
+static inline u32
+lzx_record_item_list(struct lzx_compressor *c, u32 block_size, bool is_16_bit)
 {
        u32 node_idx = block_size;
        u32 seq_idx = ARRAY_LEN(c->chosen_sequences) - 1;
@@ -1320,7 +1359,7 @@ lzx_record_item_list(struct lzx_compressor *c, u32 block_size)
                }
 
                /* Tally the main symbol.  */
-               offset_slot = lzx_comp_get_offset_slot(c, offset_data);
+               offset_slot = lzx_comp_get_offset_slot(c, offset_data, is_16_bit);
                v += offset_slot * LZX_NUM_LEN_HEADERS;
                c->freqs.main[LZX_NUM_CHARS + v]++;
 
@@ -1372,11 +1411,12 @@ out:
  * later.  The algorithm does not solve this problem; it only considers the
  * lowest cost to reach each individual position.
  */
-static struct lzx_lru_queue
+static inline struct lzx_lru_queue
 lzx_find_min_cost_path(struct lzx_compressor * const restrict c,
                       const u8 * const restrict block_begin,
                       const u32 block_size,
-                      const struct lzx_lru_queue initial_queue)
+                      const struct lzx_lru_queue initial_queue,
+                      bool is_16_bit)
 {
        struct lzx_optimum_node *cur_node = c->optimum_nodes;
        struct lzx_optimum_node * const end_node = &c->optimum_nodes[block_size];
@@ -1522,7 +1562,8 @@ lzx_find_min_cost_path(struct lzx_compressor * const restrict c,
                        do {
                                u32 offset = cache_ptr->offset;
                                u32 offset_data = offset + LZX_OFFSET_ADJUSTMENT;
-                               unsigned offset_slot = lzx_comp_get_offset_slot(c, offset_data);
+                               unsigned offset_slot = lzx_comp_get_offset_slot(c, offset_data,
+                                                                               is_16_bit);
                                do {
                                        u32 cost = cur_node->cost +
                                                   c->costs.match_cost[offset_slot][
@@ -1689,12 +1730,13 @@ lzx_update_costs(struct lzx_compressor *c)
        lzx_compute_match_costs(c);
 }
 
-static struct lzx_lru_queue
+static inline struct lzx_lru_queue
 lzx_optimize_and_write_block(struct lzx_compressor * const restrict c,
                             struct lzx_output_bitstream * const restrict os,
                             const u8 * const restrict block_begin,
                             const u32 block_size,
-                            const struct lzx_lru_queue initial_queue)
+                            const struct lzx_lru_queue initial_queue,
+                            bool is_16_bit)
 {
        unsigned num_passes_remaining = c->num_optim_passes;
        struct lzx_lru_queue new_queue;
@@ -1708,16 +1750,16 @@ lzx_optimize_and_write_block(struct lzx_compressor * const restrict c,
        lzx_reset_symbol_frequencies(c);
        do {
                new_queue = lzx_find_min_cost_path(c, block_begin, block_size,
-                                                  initial_queue);
+                                                  initial_queue, is_16_bit);
                if (num_passes_remaining > 1) {
-                       lzx_tally_item_list(c, block_size);
+                       lzx_tally_item_list(c, block_size, is_16_bit);
                        lzx_make_huffman_codes(c);
                        lzx_update_costs(c);
                        lzx_reset_symbol_frequencies(c);
                }
        } while (--num_passes_remaining);
 
-       seq_idx = lzx_record_item_list(c, block_size);
+       seq_idx = lzx_record_item_list(c, block_size, is_16_bit);
        lzx_finish_block(c, os, block_begin, block_size, seq_idx);
        return new_queue;
 }
@@ -1735,9 +1777,10 @@ lzx_optimize_and_write_block(struct lzx_compressor * const restrict c,
  * time, but rather to produce a compression ratio significantly better than a
  * simpler "greedy" or "lazy" parse while still being relatively fast.
  */
-static void
+static inline void
 lzx_compress_near_optimal(struct lzx_compressor *c,
-                         struct lzx_output_bitstream *os)
+                         struct lzx_output_bitstream *os,
+                         bool is_16_bit)
 {
        const u8 * const in_begin = c->in_buffer;
        const u8 *       in_next = in_begin;
@@ -1747,7 +1790,7 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
        u32 next_hash;
        struct lzx_lru_queue queue;
 
-       bt_matchfinder_init(&c->bt_mf);
+       CALL_BT_MF(is_16_bit, c, bt_matchfinder_init);
        memset(c->hash2_tab, 0, sizeof(c->hash2_tab));
        next_hash = bt_matchfinder_hash_3_bytes(in_next);
        lzx_lru_queue_init(&queue);
@@ -1802,16 +1845,16 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                        }
 
                        /* Check for matches of length >= 3.  */
-                       lz_matchptr = bt_matchfinder_get_matches(&c->bt_mf,
-                                                                in_begin,
-                                                                in_next,
-                                                                3,
-                                                                max_len,
-                                                                nice_len,
-                                                                c->max_search_depth,
-                                                                &next_hash,
-                                                                &best_len,
-                                                                lz_matchptr);
+                       lz_matchptr = CALL_BT_MF(is_16_bit, c, bt_matchfinder_get_matches,
+                                                in_begin,
+                                                in_next,
+                                                3,
+                                                max_len,
+                                                nice_len,
+                                                c->max_search_depth,
+                                                &next_hash,
+                                                &best_len,
+                                                lz_matchptr);
                        in_next++;
                        cache_ptr->length = lz_matchptr - (cache_ptr + 1);
                        cache_ptr = lz_matchptr;
@@ -1843,13 +1886,13 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
                                        }
                                        c->hash2_tab[lz_hash_2_bytes(in_next, LZX_HASH2_ORDER)] =
                                                in_next - in_begin;
-                                       bt_matchfinder_skip_position(&c->bt_mf,
-                                                                    in_begin,
-                                                                    in_next,
-                                                                    in_end,
-                                                                    nice_len,
-                                                                    c->max_search_depth,
-                                                                    &next_hash);
+                                       CALL_BT_MF(is_16_bit, c, bt_matchfinder_skip_position,
+                                                  in_begin,
+                                                  in_next,
+                                                  in_end,
+                                                  nice_len,
+                                                  c->max_search_depth,
+                                                  &next_hash);
                                        in_next++;
                                        cache_ptr->length = 0;
                                        cache_ptr++;
@@ -1863,10 +1906,24 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
 
                queue = lzx_optimize_and_write_block(c, os, in_block_begin,
                                                     in_next - in_block_begin,
-                                                    queue);
+                                                    queue, is_16_bit);
        } while (in_next != in_end);
 }
 
+static void
+lzx_compress_near_optimal_16(struct lzx_compressor *c,
+                            struct lzx_output_bitstream *os)
+{
+       lzx_compress_near_optimal(c, os, true);
+}
+
+static void
+lzx_compress_near_optimal_32(struct lzx_compressor *c,
+                            struct lzx_output_bitstream *os)
+{
+       lzx_compress_near_optimal(c, os, false);
+}
+
 /*
  * Given a pointer to the current byte sequence and the current list of recent
  * match offsets, find the longest repeat offset match.
@@ -1943,8 +2000,9 @@ lzx_repeat_offset_match_score(unsigned rep_len, unsigned rep_idx)
 }
 
 /* This is the "lazy" LZX compressor.  */
-static void
-lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os)
+static inline void
+lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os,
+                 bool is_16_bit)
 {
        const u8 * const in_begin = c->in_buffer;
        const u8 *       in_next = in_begin;
@@ -1955,7 +2013,7 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os)
        u32 recent_offsets[3] = {1, 1, 1};
        u32 next_hashes[2] = {};
 
-       hc_matchfinder_init(&c->hc_mf);
+       CALL_HC_MF(is_16_bit, c, hc_matchfinder_init);
 
        do {
                /* Starting a new block  */
@@ -1988,15 +2046,15 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os)
 
                        /* Find the longest match at the current position.  */
 
-                       cur_len = hc_matchfinder_longest_match(&c->hc_mf,
-                                                              in_begin,
-                                                              in_next - in_begin,
-                                                              2,
-                                                              max_len,
-                                                              nice_len,
-                                                              c->max_search_depth,
-                                                              next_hashes,
-                                                              &cur_offset);
+                       cur_len = CALL_HC_MF(is_16_bit, c, hc_matchfinder_longest_match,
+                                            in_begin,
+                                            in_next - in_begin,
+                                            2,
+                                            max_len,
+                                            nice_len,
+                                            c->max_search_depth,
+                                            next_hashes,
+                                            &cur_offset);
                        if (cur_len < 3 ||
                            (cur_len == 3 &&
                             cur_offset >= 8192 - LZX_OFFSET_ADJUSTMENT &&
@@ -2054,15 +2112,15 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os)
                                nice_len = min(max_len, nice_len);
                        }
 
-                       next_len = hc_matchfinder_longest_match(&c->hc_mf,
-                                                               in_begin,
-                                                               in_next - in_begin,
-                                                               cur_len - 2,
-                                                               max_len,
-                                                               nice_len,
-                                                               c->max_search_depth / 2,
-                                                               next_hashes,
-                                                               &next_offset);
+                       next_len = CALL_HC_MF(is_16_bit, c, hc_matchfinder_longest_match,
+                                             in_begin,
+                                             in_next - in_begin,
+                                             cur_len - 2,
+                                             max_len,
+                                             nice_len,
+                                             c->max_search_depth / 2,
+                                             next_hashes,
+                                             &next_offset);
 
                        if (next_len <= cur_len - 2) {
                                in_next++;
@@ -2112,13 +2170,14 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os)
 
                choose_cur_match:
                        lzx_record_match(c, cur_len, cur_offset_data,
-                                        recent_offsets, &litrunlen, &next_seq);
-                       in_next = hc_matchfinder_skip_positions(&c->hc_mf,
-                                                               in_begin,
-                                                               in_next - in_begin,
-                                                               in_end - in_begin,
-                                                               skip_len,
-                                                               next_hashes);
+                                        recent_offsets, is_16_bit,
+                                        &litrunlen, &next_seq);
+                       in_next = CALL_HC_MF(is_16_bit, c, hc_matchfinder_skip_positions,
+                                            in_begin,
+                                            in_next - in_begin,
+                                            in_end - in_begin,
+                                            skip_len,
+                                            next_hashes);
                } while (in_next < in_block_end);
 
                lzx_finish_sequence(next_seq, litrunlen);
@@ -2128,6 +2187,18 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os)
        } while (in_next != in_end);
 }
 
+static void
+lzx_compress_lazy_16(struct lzx_compressor *c, struct lzx_output_bitstream *os)
+{
+       lzx_compress_lazy(c, os, true);
+}
+
+static void
+lzx_compress_lazy_32(struct lzx_compressor *c, struct lzx_output_bitstream *os)
+{
+       lzx_compress_lazy(c, os, false);
+}
+
 /* Generate the acceleration tables for offset slots.  */
 static void
 lzx_init_offset_slot_tabs(struct lzx_compressor *c)
@@ -2158,11 +2229,19 @@ static size_t
 lzx_get_compressor_size(size_t max_bufsize, unsigned compression_level)
 {
        if (compression_level <= LZX_MAX_FAST_LEVEL) {
-               return offsetof(struct lzx_compressor, hc_mf) +
-                       hc_matchfinder_size(max_bufsize);
+               if (lzx_is_16_bit(max_bufsize))
+                       return offsetof(struct lzx_compressor, hc_mf_16) +
+                              hc_matchfinder_size_16(max_bufsize);
+               else
+                       return offsetof(struct lzx_compressor, hc_mf_32) +
+                              hc_matchfinder_size_32(max_bufsize);
        } else {
-               return offsetof(struct lzx_compressor, bt_mf) +
-                       bt_matchfinder_size(max_bufsize);
+               if (lzx_is_16_bit(max_bufsize))
+                       return offsetof(struct lzx_compressor, bt_mf_16) +
+                              bt_matchfinder_size_16(max_bufsize);
+               else
+                       return offsetof(struct lzx_compressor, bt_mf_32) +
+                              bt_matchfinder_size_32(max_bufsize);
        }
 }
 
@@ -2211,7 +2290,10 @@ lzx_create_compressor(size_t max_bufsize, unsigned compression_level,
 
                /* Fast compression: Use lazy parsing.  */
 
-               c->impl = lzx_compress_lazy;
+               if (lzx_is_16_bit(max_bufsize))
+                       c->impl = lzx_compress_lazy_16;
+               else
+                       c->impl = lzx_compress_lazy_32;
                c->max_search_depth = (36 * compression_level) / 20;
                c->nice_match_length = (72 * compression_level) / 20;
 
@@ -2224,7 +2306,10 @@ lzx_create_compressor(size_t max_bufsize, unsigned compression_level,
 
                /* Normal / high compression: Use near-optimal parsing.  */
 
-               c->impl = lzx_compress_near_optimal;
+               if (lzx_is_16_bit(max_bufsize))
+                       c->impl = lzx_compress_near_optimal_16;
+               else
+                       c->impl = lzx_compress_near_optimal_32;
 
                /* Scale nice_match_length and max_search_depth with the
                 * compression level.  */
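
One implementation note on the lzx_compress.c changes above: the shared workers (lzx_compress_lazy(), lzx_compress_near_optimal(), and the helpers they call) are marked inline and take a bool is_16_bit that is a compile-time constant at every call site, so after inlining each CALL_HC_MF()/CALL_BT_MF() ternary should collapse to a single direct call with the dead branch discarded. A reduced sketch of that dispatch pattern with toy names (illustration only, not wimlib code):

#include <stdbool.h>
#include <stdio.h>

static void work_16(int x) { printf("16-bit path: %d\n", x); }
static void work_32(int x) { printf("32-bit path: %d\n", x); }

/* Pick a flavor based on a flag that is constant at each call site. */
#define CALL_WORK(is_16_bit, ...) \
	((is_16_bit) ? work_16(__VA_ARGS__) : work_32(__VA_ARGS__))

static inline void
compress(int x, bool is_16_bit)
{
	CALL_WORK(is_16_bit, x);	/* folds to one direct call after inlining */
}

static void compress_16(int x) { compress(x, true); }
static void compress_32(int x) { compress(x, false); }

int main(void)
{
	compress_16(1);		/* prints "16-bit path: 1" */
	compress_32(2);		/* prints "32-bit path: 2" */
	return 0;
}
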
diff --git a/src/xpress_compress.c b/src/xpress_compress.c
index 5298af64ddb342e8598ee090cef50d48393738c8..7020a88bba7013affb6ccfcf05ee7970e89a8d18 100644
 #define MIN_LEVEL_FOR_NEAR_OPTIMAL     60
 
 /*
- * The maximum window order for the matchfinder.  This must be the base 2
- * logarithm of the maximum buffer size.
+ * Matchfinder definitions.  For XPRESS, only a 16-bit matchfinder is needed.
  */
-#define MATCHFINDER_MAX_WINDOW_ORDER   16
+#define pos_t u16
+#define MF_SUFFIX
 
 /*
  * Note: although XPRESS can potentially use a sliding window, it isn't well
@@ -59,8 +59,6 @@
  * optimizations.
  */
 
-#include <string.h>
-
 #include "wimlib/bitops.h"
 #include "wimlib/compress_common.h"
 #include "wimlib/compressor_ops.h"