+/* Initialize the LZ LRU queue: set each recent offset slot to its default
+ * value (1, 2, 3, ...) and clear the pending offsets. */
+static void
+lzms_init_lz_lru_queue(struct lzms_lz_lru_queue *queue)
+{
+	for (int i = 0; i < LZMS_NUM_RECENT_OFFSETS + 1; i++)
+		queue->recent_offsets[i] = i + 1;
+
+	queue->prev_offset = 0;
+	queue->upcoming_offset = 0;
+}
+
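+/* Update the LZ LRU queue after an item has been encoded. The update is
+ * deliberately delayed: an offset recorded in 'upcoming_offset' first moves
+ * into 'prev_offset', and only on the following call is it pushed onto the
+ * front of 'recent_offsets' (shifting the others back). A hypothetical
+ * trace, not taken from the original source:
+ *
+ *	queue.upcoming_offset = offset;    // a match was just chosen
+ *	lzms_update_lz_lru_queue(&queue);  // next position: prev_offset = offset
+ *	lzms_update_lz_lru_queue(&queue);  // one more: recent_offsets[0] = offset
+ */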
+static void
+lzms_update_lz_lru_queue(struct lzms_lz_lru_queue *queue)
+{
+	if (queue->prev_offset != 0) {
+		for (int i = LZMS_NUM_RECENT_OFFSETS - 1; i >= 0; i--)
+			queue->recent_offsets[i + 1] = queue->recent_offsets[i];
+		queue->recent_offsets[0] = queue->prev_offset;
+	}
+	queue->prev_offset = queue->upcoming_offset;
+}
+
+/*
+ * Match chooser position data:
+ *
+ * An array of these structures is used during the near-optimal match-choosing
+ * algorithm. They correspond to consecutive positions in the window and are
+ * used to keep track of the cost to reach each position, as well as the
+ * match/literal choices that need to be made to reach that position.
+ */
+struct lzms_mc_pos_data {
+
+	/* The cost, in bits, of the lowest-cost path that has been found to
+	 * reach this position. This can change as progressively lower cost
+	 * paths are found to reach this position. */
+	u32 cost;
+#define MC_INFINITE_COST UINT32_MAX
+
+	/* The match or literal that was taken to reach this position. This can
+	 * change as progressively lower cost paths are found to reach this
+	 * position.
+	 *
+	 * This variable is divided into two bitfields.
+	 *
+	 * Literals:
+	 *	Low bits are 1, high bits are the literal.
+	 *
+	 * Explicit offset matches:
+	 *	Low bits are the match length, high bits are the offset plus 2.
+	 *
+	 * Repeat offset matches:
+	 *	Low bits are the match length, high bits are the queue index.
+	 *
+	 * (An illustrative packing sketch follows this struct definition.) */
+	u64 mc_item_data;
+#define MC_OFFSET_SHIFT 32
+#define MC_LEN_MASK (((u64)1 << MC_OFFSET_SHIFT) - 1)
+
+	/* The LZMS adaptive state that exists at this position. This is filled
+	 * in lazily, only after the minimum-cost path to this position is
+	 * found.
+	 *
+	 * Note: the way we handle this adaptive state in the "minimum-cost"
+	 * parse is actually only an approximation. It's possible for the
+	 * globally optimal, minimum cost path to contain a prefix, ending at a
+	 * position, where that path prefix is *not* the minimum cost path to
+	 * that position. This can happen if such a path prefix leaves behind a
+	 * different adaptive state that produces lower costs later. We do not
+	 * solve this problem; we only consider the lowest cost to reach each
+	 * position, which seems to be an acceptable approximation.
+	 *
+	 * Note: this adaptive state also does not include the probability
+	 * entries or current Huffman codewords. Those aren't maintained
+	 * per-position and are only updated occasionally. */
+	struct lzms_adaptive_state {
+		struct lzms_lz_lru_queue lru;
+		u8 main_state;
+		u8 match_state;
+		u8 lz_match_state;
+		u8 lz_repeat_match_state[LZMS_NUM_RECENT_OFFSETS - 1];
+	} state;
+};
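+
+/*
+ * Illustrative only, not part of the original encoder: hypothetical helpers
+ * showing how the mc_item_data layout described above would be packed.
+ */
+static inline u64
+lzms_mc_make_literal_item(u8 literal)
+{
+	/* Low bits are 1; high bits are the literal. */
+	return ((u64)literal << MC_OFFSET_SHIFT) | 1;
+}
+
+static inline u64
+lzms_mc_make_explicit_match_item(u32 length, u32 offset)
+{
+	/* Low bits are the match length; high bits are the offset plus 2. */
+	return ((u64)(offset + 2) << MC_OFFSET_SHIFT) | length;
+}
+
+static inline u64
+lzms_mc_make_repeat_match_item(u32 length, unsigned queue_index)
+{
+	/* Low bits are the match length; high bits are the queue index. */
+	return ((u64)queue_index << MC_OFFSET_SHIFT) | length;
+}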
+
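+/* Precompute the tables that map small lengths and small offsets directly to
+ * their length/offset slots, so the common cases can skip the generic slot
+ * lookup. */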
+static void
+lzms_init_fast_slots(struct lzms_compressor *c)
+{
+	/* Create table mapping small lengths to length slots. */
+	for (unsigned slot = 0, i = 0; i < LZMS_NUM_FAST_LENGTHS; i++) {
+		while (i >= lzms_length_slot_base[slot + 1])
+			slot++;
+		c->length_slot_fast[i] = slot;
+	}
+
+	/* Create table mapping small offsets to offset slots. */
+	for (unsigned slot = 0, i = 0; i < LZMS_NUM_FAST_OFFSETS; i++) {
+		while (i >= lzms_offset_slot_base[slot + 1])
+			slot++;
+		c->offset_slot_fast[i] = slot;
+	}
+}
+
+static inline unsigned
+lzms_get_length_slot_fast(const struct lzms_compressor *c, u32 length)
+{
+	if (likely(length < LZMS_NUM_FAST_LENGTHS))
+		return c->length_slot_fast[length];
+	else
+		return lzms_get_length_slot(length);
+}
+
+static inline unsigned
+lzms_get_offset_slot_fast(const struct lzms_compressor *c, u32 offset)
+{
+	if (offset < LZMS_NUM_FAST_OFFSETS)
+		return c->offset_slot_fast[offset];
+	else
+		return lzms_get_offset_slot(offset);
+}
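+
+/*
+ * Illustrative usage, with hypothetical values not taken from the original
+ * source: costing a match of length 5 at offset 1000 would call
+ * lzms_get_length_slot_fast(c, 5) and lzms_get_offset_slot_fast(c, 1000).
+ * Each hits its precomputed table when the value is below
+ * LZMS_NUM_FAST_LENGTHS or LZMS_NUM_FAST_OFFSETS, respectively, and
+ * otherwise falls back to the generic slot lookup.
+ */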
+
+/* Initialize the output bitstream @os to write backwards to the specified