+ lzms_output_bitstream_put_bits(enc->os,
+ enc->codewords[sym],
+ enc->lens[sym]);
+ ++enc->num_syms_written;
+ ++enc->sym_freqs[sym];
+}
+
+/* Encode a number as a Huffman symbol specifying a slot, plus a number of
+ * slot-dependent extra bits. */
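+/* For illustration (hypothetical numbers, not taken from the real slot base
+ * tables): if slot_base_tab[slot] were 16 and slot_base_tab[slot + 1] were 32,
+ * then the value 21 would be encoded as the Huffman symbol for that slot,
+ * followed by bsr32(32 - 16) = 4 extra bits holding 21 - 16 = 5. */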
+static void
+lzms_encode_value(struct lzms_huffman_encoder *enc, u32 value)
+{
+ unsigned slot;
+ unsigned num_extra_bits;
+ u32 extra_bits;
+
+ LZMS_ASSERT(enc->slot_base_tab != NULL);
+
+ slot = lzms_get_slot(value, enc->slot_base_tab, enc->num_syms);
+
+ /* Get the number of extra bits needed to represent the range of values
+ * that share the slot. */
+ num_extra_bits = bsr32(enc->slot_base_tab[slot + 1] -
+ enc->slot_base_tab[slot]);
+
+ /* Calculate the extra bits as the offset from the slot base. */
+ extra_bits = value - enc->slot_base_tab[slot];
+
+ /* Output the slot (Huffman-encoded), then the extra bits (verbatim). */
+ lzms_huffman_encode_symbol(enc, slot);
+ lzms_output_bitstream_put_bits(enc->os, extra_bits, num_extra_bits);
+}
+
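+/* Prepare to encode the next item (literal or match): clear the "upcoming"
+ * offset fields.  If the item uses an offset, lzms_encode_lz_match() records
+ * it as upcoming so that lzms_update_lru_queues(), called from
+ * lzms_end_encode_item(), can work it into the recent-offsets queues. */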
+static void
+lzms_begin_encode_item(struct lzms_compressor *ctx)
+{
+ ctx->lru.lz.upcoming_offset = 0;
+ ctx->lru.delta.upcoming_offset = 0;
+ ctx->lru.delta.upcoming_power = 0;
+}
+
+static void
+lzms_end_encode_item(struct lzms_compressor *ctx, u32 length)
+{
+ LZMS_ASSERT(ctx->window_size - ctx->cur_window_pos >= length);
+ ctx->cur_window_pos += length;
+ lzms_update_lru_queues(&ctx->lru);
+}
+
+/* Encode a literal byte. */
+static void
+lzms_encode_literal(struct lzms_compressor *ctx, u8 literal)
+{
+ LZMS_DEBUG("Position %u: Encoding literal 0x%02x ('%c')",
+ ctx->cur_window_pos, literal, literal);
+
+ lzms_begin_encode_item(ctx);
+
+ /* Main bit: 0 = a literal, not a match. */
+ lzms_range_encode_bit(&ctx->main_range_encoder, 0);
+
+ /* Encode the literal using the current literal Huffman code. */
+ lzms_huffman_encode_symbol(&ctx->literal_encoder, literal);
+
+ lzms_end_encode_item(ctx, 1);
+}
+
+/* Encode a (length, offset) pair (LZ match). */
+static void
+lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset)
+{
+ int recent_offset_idx;
+
+ LZMS_ASSERT(!memcmp(&ctx->window[ctx->cur_window_pos],
+ &ctx->window[ctx->cur_window_pos - offset],
+ length));
+
+ lzms_begin_encode_item(ctx);
+
+ LZMS_DEBUG("Position %u: Encoding LZ match {length=%u, offset=%u}",
+ ctx->cur_window_pos, length, offset);
+
+ /* Main bit: 1 = a match, not a literal. */
+ lzms_range_encode_bit(&ctx->main_range_encoder, 1);
+
+ /* Match bit: 0 = an LZ match, not a delta match. */
+ lzms_range_encode_bit(&ctx->match_range_encoder, 0);
+
+ /* Determine if the offset can be represented as a recent offset. */
+ for (recent_offset_idx = 0;
+ recent_offset_idx < LZMS_NUM_RECENT_OFFSETS;
+ recent_offset_idx++)
+ if (offset == ctx->lru.lz.recent_offsets[recent_offset_idx])
+ break;
+
+ if (recent_offset_idx == LZMS_NUM_RECENT_OFFSETS) {
+ /* Explicit offset. */
+
+ /* LZ match bit: 0 = explicit offset, not a repeat offset. */
+ lzms_range_encode_bit(&ctx->lz_match_range_encoder, 0);
+
+ /* Encode the match offset. */
+ lzms_encode_value(&ctx->lz_offset_encoder, offset);
+ } else {
+ int i;
+
+ /* Repeat offset. */
+
+ /* LZ match bit: 1 = repeat offset, not an explicit offset. */
+ lzms_range_encode_bit(&ctx->lz_match_range_encoder, 1);
+
+ /* Encode the recent offset index. A 1 bit is encoded for each
+ * index passed up. This sequence of 1 bits is terminated by a
+ * 0 bit, or automatically when (LZMS_NUM_RECENT_OFFSETS - 1) 1
+ * bits have been encoded. */
+ for (i = 0; i < recent_offset_idx; i++)
+ lzms_range_encode_bit(&ctx->lz_repeat_match_range_encoders[i], 1);
+
+ if (i < LZMS_NUM_RECENT_OFFSETS - 1)
+ lzms_range_encode_bit(&ctx->lz_repeat_match_range_encoders[i], 0);
+
+ /* Initial update of the LZ match offset LRU queue. */
+ for (; i < LZMS_NUM_RECENT_OFFSETS; i++)
+ ctx->lru.lz.recent_offsets[i] = ctx->lru.lz.recent_offsets[i + 1];
+ }
+
+ /* Encode the match length. */
+ lzms_encode_value(&ctx->length_encoder, length);
+
+ /* Save the match offset for later insertion at the front of the LZ
+ * match offset LRU queue. */
+ ctx->lru.lz.upcoming_offset = offset;
+
+ lzms_end_encode_item(ctx, length);
+}
+
+static void
+lzms_record_literal(u8 literal, void *_ctx)
+{
+ struct lzms_compressor *ctx = _ctx;
+
+ lzms_encode_literal(ctx, literal);
+}
+
+static void
+lzms_record_match(unsigned length, unsigned offset, void *_ctx)
+{
+ struct lzms_compressor *ctx = _ctx;
+
+ lzms_encode_lz_match(ctx, length, offset);
+}
+
+static void
+lzms_fast_encode(struct lzms_compressor *ctx)
+{
+ static const struct lz_params lzms_lz_params = {
+ .min_match = 3,
+ .max_match = UINT_MAX,
+ .max_offset = UINT_MAX,
+ .nice_match = 64,
+ .good_match = 32,
+ .max_chain_len = 64,
+ .max_lazy_match = 258,
+ .too_far = 4096,
+ };
+
+ lz_analyze_block(ctx->window,
+ ctx->window_size,
+ lzms_record_match,
+ lzms_record_literal,
+ ctx,
+ &lzms_lz_params,
+ ctx->prev_tab);
+}
+
+/* Fast heuristic cost evaluation to use in the inner loop of the match-finder.
+ * Unlike lzms_get_match_cost(), which does a true cost evaluation, this simply
+ * prioritizes matches based on their offset. */
+static input_idx_t
+lzms_match_cost_fast(input_idx_t length, input_idx_t offset, const void *_lru)
+{
+ const struct lzms_lz_lru_queues *lru = _lru;
+
+ for (input_idx_t i = 0; i < LZMS_NUM_RECENT_OFFSETS; i++)
+ if (offset == lru->recent_offsets[i])
+ return i;
+
+ return offset;
+}
+
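+/* Return an estimated cost of range-encoding the given bit in the given state,
+ * and advance the (tentative) state.  num_recent_zero_bits estimates the
+ * probability, out of LZMS_PROBABILITY_MAX, that the next bit will be 0; the
+ * cost returned is currently just a rough placeholder proportional to the
+ * estimated probability of the opposite bit (see the TODO below). */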
+static u32
+lzms_rc_bit_cost(const struct lzms_range_encoder *enc, u8 *cur_state, int bit)
+{
+ u32 prob;
+ u32 cost;
+
+ prob = enc->prob_entries[*cur_state & enc->mask].num_recent_zero_bits;
+ if (prob == 0)
+ prob = 1;
+ else if (prob == LZMS_PROBABILITY_MAX)
+ prob = LZMS_PROBABILITY_MAX - 1;
+
+ if (bit == 0)
+ prob = LZMS_PROBABILITY_MAX - prob;
+
+ cost = prob * 2; /* TODO */
+
+ *cur_state = (*cur_state << 1) | bit;
+
+ return cost;
+}
+
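+/* Scale factor for the Huffman cost metric: a codeword of n bits is charged
+ * n * LZMS_COST_SCALE units, so one whole coded bit corresponds to 64 cost
+ * units. */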
+#define LZMS_COST_SCALE 64
+
+static u32
+lzms_huffman_symbol_cost(const struct lzms_huffman_encoder *enc, u32 sym)
+{
+ return enc->lens[sym] * LZMS_COST_SCALE;
+}
+
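+/* Return the approximate cost of encoding the given value as a slot symbol
+ * plus extra bits, mirroring what lzms_encode_value() would output. */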
+static u32
+lzms_value_cost(const struct lzms_huffman_encoder *enc, u32 value)
+{
+ u32 slot;
+ u32 num_extra_bits;
+ u32 cost = 0;
+
+ slot = lzms_get_slot(value, enc->slot_base_tab, enc->num_syms);
+
+ cost += lzms_huffman_symbol_cost(enc, slot);
+
+ num_extra_bits = bsr32(enc->slot_base_tab[slot + 1] -
+ enc->slot_base_tab[slot]);
+
+ cost += num_extra_bits * LZMS_COST_SCALE;
+
+ return cost;
+}
+
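+/* Retrieve the matches available at the current position from the suffix-array
+ * match-finder.  When debugging is enabled, the matches are also validated:
+ * each must lie within the window, point backwards into it, actually match the
+ * data at the current position, and the list must be sorted by strictly
+ * decreasing length. */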
+static u32
+lzms_get_matches(struct lzms_compressor *ctx,
+ const struct lzms_adaptive_state *cost_state,
+ struct raw_match **matches_ret)
+{
+ u32 num_matches;
+ struct raw_match *matches = ctx->matches;
+
+ num_matches = lz_sarray_get_matches(&ctx->lz_sarray,
+ matches,
+ lzms_match_cost_fast,
+ &cost_state->lru);
+
+#ifdef ENABLE_LZMS_DEBUG
+ LZMS_ASSERT(lz_sarray_get_pos(&ctx->lz_sarray) >= 1);
+ u32 curpos = lz_sarray_get_pos(&ctx->lz_sarray) - 1;
+ for (u32 i = 0; i < num_matches; i++) {
+ LZMS_ASSERT(matches[i].len <= ctx->window_size - curpos);
+ LZMS_ASSERT(matches[i].offset > 0);
+ LZMS_ASSERT(matches[i].offset <= curpos);
+ LZMS_ASSERT(!memcmp(&ctx->window[curpos],
+ &ctx->window[curpos - matches[i].offset],
+ matches[i].len));
+ if (i > 0)
+ LZMS_ASSERT(matches[i - 1].len > matches[i].len);
+ }
+#endif
+
+ *matches_ret = matches;
+ return num_matches;
+}
+
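+/* Advance the match-finder past the next @n positions without looking for
+ * matches at them. */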
+static void
+lzms_skip_bytes(struct lzms_compressor *ctx, input_idx_t n)
+{
+ while (n--)
+ lz_sarray_skip_position(&ctx->lz_sarray);
+}
+
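+/* Return the estimated cost of encoding, as a literal, the byte just before
+ * the match-finder's current position, updating the tentative adaptive state
+ * (@cost_state) as if that literal had been encoded. */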
+static u32
+lzms_get_prev_literal_cost(struct lzms_compressor *ctx,
+ struct lzms_adaptive_state *cost_state)
+{
+ u8 literal = ctx->window[lz_sarray_get_pos(&ctx->lz_sarray) - 1];
+ u32 cost = 0;
+
+ cost_state->lru.upcoming_offset = 0;
+ lzms_update_lz_lru_queues(&cost_state->lru);
+
+ cost += lzms_rc_bit_cost(&ctx->main_range_encoder,
+ &cost_state->main_state, 0);
+ cost += lzms_huffman_symbol_cost(&ctx->literal_encoder, literal);
+
+ return cost;
+}
+
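+/* Return the estimated cost of encoding an LZ match with the given length and
+ * offset, updating the tentative adaptive state (@cost_state) along the way.
+ * This mirrors the decisions made in lzms_encode_lz_match(), except that the
+ * costs of the repeat-offset index bits are still placeholders (see the
+ * TODOs). */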
+static u32
+lzms_get_match_cost(struct lzms_compressor *ctx,
+ struct lzms_adaptive_state *cost_state,
+ input_idx_t length, input_idx_t offset)
+{
+ u32 cost = 0;
+ int recent_offset_idx;
+
+ cost += lzms_rc_bit_cost(&ctx->main_range_encoder,
+ &cost_state->main_state, 1);
+ cost += lzms_rc_bit_cost(&ctx->match_range_encoder,
+ &cost_state->match_state, 0);
+
+ for (recent_offset_idx = 0;
+ recent_offset_idx < LZMS_NUM_RECENT_OFFSETS;
+ recent_offset_idx++)
+ if (offset == cost_state->lru.recent_offsets[recent_offset_idx])
+ break;
+
+ if (recent_offset_idx == LZMS_NUM_RECENT_OFFSETS) {
+ /* Explicit offset. */
+ cost += lzms_rc_bit_cost(&ctx->lz_match_range_encoder,
+ &cost_state->lz_match_state, 0);
+
+ cost += lzms_value_cost(&ctx->lz_offset_encoder, offset);
+ } else {
+ int i;
+
+ /* Repeat offset. */
+ cost += lzms_rc_bit_cost(&ctx->lz_match_range_encoder,
+ &cost_state->lz_match_state, 1);
+
+ for (i = 0; i < recent_offset_idx; i++)
+ cost++; /* TODO */
+
+ if (i < LZMS_NUM_RECENT_OFFSETS - 1)
+ cost++; /* TODO */
+
+ /* Initial update of the LZ match offset LRU queue. */
+ for (; i < LZMS_NUM_RECENT_OFFSETS; i++)
+ cost_state->lru.recent_offsets[i] = cost_state->lru.recent_offsets[i + 1];
+ }
+
+ cost += lzms_value_cost(&ctx->length_encoder, length);
+
+ cost_state->lru.upcoming_offset = offset;
+ lzms_update_lz_lru_queues(&cost_state->lru);
+
+ return cost;
+}
+
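+/* Choose the next literal or match using the generic near-optimal parser.  A
+ * snapshot of the relevant adaptive state (LZ LRU queues and range encoder
+ * states) is taken first so that candidate costs can be evaluated without
+ * disturbing the real encoder state. */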
+static struct raw_match
+lzms_get_near_optimal_match(struct lzms_compressor *ctx)
+{
+ struct lzms_adaptive_state initial_state = {
+ .lru = ctx->lru.lz,
+ .main_state = ctx->main_range_encoder.state,
+ .match_state = ctx->match_range_encoder.state,
+ .lz_match_state = ctx->lz_match_range_encoder.state,
+ };
+ return lz_get_near_optimal_match(&ctx->mc,
+ lzms_get_matches,
+ lzms_skip_bytes,
+ lzms_get_prev_literal_cost,
+ lzms_get_match_cost,
+ ctx,
+ &initial_state);
+}
+
+static void
+lzms_slow_encode(struct lzms_compressor *ctx)
+{
+ struct raw_match match;
+
+ /* Load window into suffix array match-finder. */
+ lz_sarray_load_window(&ctx->lz_sarray, ctx->window, ctx->window_size);
+
+ /* Reset the match-chooser. */
+ lz_match_chooser_begin(&ctx->mc);
+
+ /* TODO */
+ while (ctx->cur_window_pos != ctx->window_size) {
+
+ match = lzms_get_near_optimal_match(ctx);
+ if (match.len <= 1) {
+ /* Literal */
+ lzms_encode_literal(ctx, ctx->window[ctx->cur_window_pos]);
+ } else {
+ /* LZ match */
+ lzms_encode_lz_match(ctx, match.len, match.offset);
+ }
+ }
+}
+
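+/* Initialize one of the range encoder contexts that wrap the shared raw range
+ * encoder.  @num_states is assumed to be a power of 2, since (num_states - 1)
+ * is used as a bitmask over the adaptive state. */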
+static void
+lzms_init_range_encoder(struct lzms_range_encoder *enc,
+ struct lzms_range_encoder_raw *rc, u32 num_states)
+{
+ enc->rc = rc;
+ enc->state = 0;
+ enc->mask = num_states - 1;
+ for (u32 i = 0; i < num_states; i++) {
+ enc->prob_entries[i].num_recent_zero_bits = LZMS_INITIAL_PROBABILITY;
+ enc->prob_entries[i].recent_bits = LZMS_INITIAL_RECENT_BITS;
+ }
+}
+
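+/* Initialize a Huffman encoder.  All symbol frequencies start at 1, and
+ * @num_syms_written starts at @rebuild_freq, which presumably forces the code
+ * to be built before the first symbol is written. */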
+static void
+lzms_init_huffman_encoder(struct lzms_huffman_encoder *enc,
+ struct lzms_output_bitstream *os,
+ const u32 *slot_base_tab,
+ unsigned num_syms,
+ unsigned rebuild_freq)
+{
+ enc->os = os;
+ enc->slot_base_tab = slot_base_tab;
+ enc->num_syms_written = rebuild_freq;
+ enc->rebuild_freq = rebuild_freq;
+ enc->num_syms = num_syms;
+ for (unsigned i = 0; i < num_syms; i++)
+ enc->sym_freqs[i] = 1;
+}
+
+/* Initialize the LZMS compressor. */
+static void
+lzms_init_compressor(struct lzms_compressor *ctx, const u8 *udata, u32 ulen,
+ le16 *cdata, u32 clen16)
+{
+ unsigned num_position_slots;
+
+ /* Copy the uncompressed data into the @ctx->window buffer. */
+ memcpy(ctx->window, udata, ulen);
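+ /* Zero-pad a few bytes past the end of the window; presumably this lets
+ * later stages read slightly past the end without special cases. */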
+ memset(&ctx->window[ulen], 0, 8);
+ ctx->cur_window_pos = 0;
+ ctx->window_size = ulen;
+
+ /* Initialize the raw range encoder (writing forwards). */
+ lzms_range_encoder_raw_init(&ctx->rc, cdata, clen16);
+
+ /* Initialize the output bitstream for Huffman symbols and verbatim bits
+ * (writing backwards). */
+ lzms_output_bitstream_init(&ctx->os, cdata, clen16);
+
+ /* Initialize position and length slot bases if not done already. */
+ lzms_init_slot_bases();
+
+ /* Calculate the number of position slots needed for this compressed
+ * block. */
+ num_position_slots = lzms_get_position_slot(ulen - 1) + 1;
+
+ LZMS_DEBUG("Using %u position slots", num_position_slots);
+
+ /* Initialize Huffman encoders for each alphabet used in the compressed
+ * representation. */
+ lzms_init_huffman_encoder(&ctx->literal_encoder, &ctx->os,
+ NULL, LZMS_NUM_LITERAL_SYMS,
+ LZMS_LITERAL_CODE_REBUILD_FREQ);
+
+ lzms_init_huffman_encoder(&ctx->lz_offset_encoder, &ctx->os,
+ lzms_position_slot_base, num_position_slots,
+ LZMS_LZ_OFFSET_CODE_REBUILD_FREQ);
+
+ lzms_init_huffman_encoder(&ctx->length_encoder, &ctx->os,
+ lzms_length_slot_base, LZMS_NUM_LEN_SYMS,
+ LZMS_LENGTH_CODE_REBUILD_FREQ);
+
+ lzms_init_huffman_encoder(&ctx->delta_offset_encoder, &ctx->os,
+ lzms_position_slot_base, num_position_slots,
+ LZMS_DELTA_OFFSET_CODE_REBUILD_FREQ);
+
+ lzms_init_huffman_encoder(&ctx->delta_power_encoder, &ctx->os,
+ NULL, LZMS_NUM_DELTA_POWER_SYMS,
+ LZMS_DELTA_POWER_CODE_REBUILD_FREQ);
+
+ /* Initialize range encoders, all of which wrap around the same
+ * lzms_range_encoder_raw. */
+ lzms_init_range_encoder(&ctx->main_range_encoder,
+ &ctx->rc, LZMS_NUM_MAIN_STATES);
+
+ lzms_init_range_encoder(&ctx->match_range_encoder,
+ &ctx->rc, LZMS_NUM_MATCH_STATES);
+
+ lzms_init_range_encoder(&ctx->lz_match_range_encoder,
+ &ctx->rc, LZMS_NUM_LZ_MATCH_STATES);
+
+ for (size_t i = 0; i < ARRAY_LEN(ctx->lz_repeat_match_range_encoders); i++)
+ lzms_init_range_encoder(&ctx->lz_repeat_match_range_encoders[i],
+ &ctx->rc, LZMS_NUM_LZ_REPEAT_MATCH_STATES);
+
+ lzms_init_range_encoder(&ctx->delta_match_range_encoder,
+ &ctx->rc, LZMS_NUM_DELTA_MATCH_STATES);
+
+ for (size_t i = 0; i < ARRAY_LEN(ctx->delta_repeat_match_range_encoders); i++)
+ lzms_init_range_encoder(&ctx->delta_repeat_match_range_encoders[i],
+ &ctx->rc, LZMS_NUM_DELTA_REPEAT_MATCH_STATES);
+
+ /* Initialize LRU match information. */
+ lzms_init_lru_queues(&ctx->lru);
+}
+
+/* Flush the output streams, prepare the final compressed data, and return its
+ * size in bytes.
+ *
+ * A return value of 0 indicates that the data could not be compressed to fit in
+ * the available space. */
+static size_t
+lzms_finalize(struct lzms_compressor *ctx, u8 *cdata, size_t csize_avail)
+{
+ size_t num_forwards_bytes;
+ size_t num_backwards_bytes;
+ size_t compressed_size;
+
+ /* Flush both the forwards and backwards streams, and make sure they
+ * didn't cross each other and start overwriting each other's data. */
+ if (!lzms_output_bitstream_flush(&ctx->os)) {
+ LZMS_DEBUG("Backwards bitstream overrun.");
+ return 0;
+ }
+
+ if (!lzms_range_encoder_raw_flush(&ctx->rc)) {
+ LZMS_DEBUG("Forwards bitstream overrun.");
+ return 0;
+ }
+
+ if (ctx->rc.out > ctx->os.out) {
+ LZMS_DEBUG("Two bitstreams crossed.");
+ return 0;
+ }
+
+ /* Now the compressed buffer contains the data output by the forwards
+ * bitstream, then empty space, then data output by the backwards
+ * bitstream. Move the data output by the backwards bitstream to be
+ * adjacent to the data output by the forwards bitstream, and calculate
+ * the compressed size that this results in. */
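+ /*   Before:  [ forwards data | ...unused space... | backwards data ]
+ *   After:   [ forwards data | backwards data ]                      */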
+ num_forwards_bytes = (u8*)ctx->rc.out - (u8*)cdata;
+ num_backwards_bytes = ((u8*)cdata + csize_avail) - (u8*)ctx->os.out;
+
+ memmove(cdata + num_forwards_bytes, ctx->os.out, num_backwards_bytes);
+
+ compressed_size = num_forwards_bytes + num_backwards_bytes;
+ LZMS_DEBUG("num_forwards_bytes=%zu, num_backwards_bytes=%zu, "
+ "compressed_size=%zu",
+ num_forwards_bytes, num_backwards_bytes, compressed_size);
+ LZMS_ASSERT(!(compressed_size & 1));
+ return compressed_size;