diff --git a/src/lzms-compress.c b/src/lzms-compress.c
index 2a44e479a2da88136b149f8b8791c539ae1721eb..b98ee760d5765738e9ae927618b58fc1827795f2 100644
--- a/src/lzms-compress.c
+++ b/src/lzms-compress.c
 #include "wimlib/compress_common.h"
 #include "wimlib/endianness.h"
 #include "wimlib/error.h"
+#include "wimlib/lz_hash.h"
+#include "wimlib/lz_sarray.h"
 #include "wimlib/lzms.h"
 #include "wimlib/util.h"
 
 #include <string.h>
+#include <limits.h>
+
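+/* Size of the match-chooser's optimization array (see lz_optimal.h).  */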
+#define LZMS_OPTIM_ARRAY_SIZE  1024
+
+struct lzms_compressor;
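+
+/* Adaptive state tracked by the match-chooser for cost estimation: the LZ
+ * offset LRU queues plus the states of the range encoders consulted when
+ * costing literals and LZ matches.  */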
+struct lzms_adaptive_state {
+       struct lzms_lz_lru_queues lru;
+       u8 main_state;
+       u8 match_state;
+       u8 lz_match_state;
+};
+#define LZ_ADAPTIVE_STATE struct lzms_adaptive_state
+#define LZ_COMPRESSOR    struct lzms_compressor
+#include "wimlib/lz_optimal.h"
 
 /* Structure used for writing raw bits to the end of the LZMS-compressed data as
  * a series of 16-bit little endian coding units.  */
@@ -168,6 +184,18 @@ struct lzms_compressor {
        /* Size of the data in @buffer.  */
        u32 window_size;
 
+       /* Temporary array used by lz_analyze_block(); must be at least as long
+        * as the window.  */
+       u32 *prev_tab;
+
+       /* Suffix array match-finder.  */
+       struct lz_sarray lz_sarray;
+
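+       /* Temporary space for matches returned by lz_sarray_get_matches().  */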
+       struct raw_match matches[64];
+
+       /* Match-chooser.  */
+       struct lz_match_chooser mc;
+
        /* Maximum block size this compressor instantiation allows.  This is the
         * allocated size of @window.  */
        u32 max_block_size;
@@ -195,33 +223,13 @@ struct lzms_compressor {
        struct lzms_huffman_encoder delta_power_encoder;
        struct lzms_huffman_encoder delta_offset_encoder;
 
-       /* LRU (least-recently-used) queue of LZ match offsets.  */
-       u64 recent_lz_offsets[LZMS_NUM_RECENT_OFFSETS + 1];
-
-       /* LRU (least-recently-used) queue of delta match powers.  */
-       u32 recent_delta_powers[LZMS_NUM_RECENT_OFFSETS + 1];
-
-       /* LRU (least-recently-used) queue of delta match offsets.  */
-       u32 recent_delta_offsets[LZMS_NUM_RECENT_OFFSETS + 1];
-
-       /* These variables are used to delay updates to the LRU queues by one
-        * decoded item.  */
-       u32 prev_lz_offset;
-       u32 prev_delta_power;
-       u32 prev_delta_offset;
-       u32 upcoming_lz_offset;
-       u32 upcoming_delta_power;
-       u32 upcoming_delta_offset;
+       /* LRU (least-recently-used) queues for match information.  */
+       struct lzms_lru_queues lru;
 
        /* Used for preprocessing.  */
        s32 last_target_usages[65536];
 };
 
-struct lzms_match {
-       u32 length;
-       u32 offset;
-};
-
 /* Initialize the output bitstream @os to write forwards to the specified
  * compressed data buffer @out that is @out_limit 16-bit integers long.  */
 static void
@@ -476,6 +484,22 @@ lzms_encode_value(struct lzms_huffman_encoder *enc, u32 value)
        lzms_output_bitstream_put_bits(enc->os, extra_bits, num_extra_bits);
 }
 
+static void
+lzms_begin_encode_item(struct lzms_compressor *ctx)
+{
+       ctx->lru.lz.upcoming_offset = 0;
+       ctx->lru.delta.upcoming_offset = 0;
+       ctx->lru.delta.upcoming_power = 0;
+}
+
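+/* Finish encoding an item (literal or match): advance the current window
+ * position past it and update the LRU queues.  */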
+static void
+lzms_end_encode_item(struct lzms_compressor *ctx, u32 length)
+{
+       LZMS_ASSERT(ctx->window_size - ctx->cur_window_pos >= length);
+       ctx->cur_window_pos += length;
+       lzms_update_lru_queues(&ctx->lru);
+}
+
 /* Encode a literal byte.  */
 static void
 lzms_encode_literal(struct lzms_compressor *ctx, u8 literal)
@@ -483,11 +507,15 @@ lzms_encode_literal(struct lzms_compressor *ctx, u8 literal)
        LZMS_DEBUG("Position %u: Encoding literal 0x%02x ('%c')",
                   ctx->cur_window_pos, literal, literal);
 
+       lzms_begin_encode_item(ctx);
+
        /* Main bit: 0 = a literal, not a match.  */
        lzms_range_encode_bit(&ctx->main_range_encoder, 0);
 
        /* Encode the literal using the current literal Huffman code.  */
        lzms_huffman_encode_symbol(&ctx->literal_encoder, literal);
+
+       lzms_end_encode_item(ctx, 1);
 }
 
 /* Encode a (length, offset) pair (LZ match).  */
@@ -496,6 +524,12 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset)
 {
        int recent_offset_idx;
 
+       LZMS_ASSERT(!memcmp(&ctx->window[ctx->cur_window_pos],
+                           &ctx->window[ctx->cur_window_pos - offset],
+                           length));
+
+       lzms_begin_encode_item(ctx);
+
        LZMS_DEBUG("Position %u: Encoding LZ match {length=%u, offset=%u}",
                   ctx->cur_window_pos, length, offset);
 
@@ -509,7 +543,7 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset)
        for (recent_offset_idx = 0;
             recent_offset_idx < LZMS_NUM_RECENT_OFFSETS;
             recent_offset_idx++)
-               if (offset == ctx->recent_lz_offsets[recent_offset_idx])
+               if (offset == ctx->lru.lz.recent_offsets[recent_offset_idx])
                        break;
 
        if (recent_offset_idx == LZMS_NUM_RECENT_OFFSETS) {
@@ -525,23 +559,22 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset)
 
                /* Repeat offset.  */
 
-
-               /* LZ match bit: 0 = repeat offset, not an explicit offset.  */
+               /* LZ match bit: 1 = repeat offset, not an explicit offset.  */
                lzms_range_encode_bit(&ctx->lz_match_range_encoder, 1);
 
                /* Encode the recent offset index.  A 1 bit is encoded for each
                 * index passed up.  This sequence of 1 bits is terminated by a
                 * 0 bit, or automatically when (LZMS_NUM_RECENT_OFFSETS - 1) 1
                 * bits have been encoded.  */
-               for (i = 0; i < recent_offset_idx - 1; i++)
+               for (i = 0; i < recent_offset_idx; i++)
                        lzms_range_encode_bit(&ctx->lz_repeat_match_range_encoders[i], 1);
 
                if (i < LZMS_NUM_RECENT_OFFSETS - 1)
                        lzms_range_encode_bit(&ctx->lz_repeat_match_range_encoders[i], 0);
 
                /* Initial update of the LZ match offset LRU queue.  */
-               for (i = recent_offset_idx; i < LZMS_NUM_RECENT_OFFSETS; i++)
-                       ctx->recent_lz_offsets[i] = ctx->recent_lz_offsets[i + 1];
+               for (; i < LZMS_NUM_RECENT_OFFSETS; i++)
+                       ctx->lru.lz.recent_offsets[i] = ctx->lru.lz.recent_offsets[i + 1];
        }
 
        /* Encode the match length.  */
@@ -549,19 +582,264 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset)
 
        /* Save the match offset for later insertion at the front of the LZ
         * match offset LRU queue.  */
-       ctx->upcoming_lz_offset = offset;
+       ctx->lru.lz.upcoming_offset = offset;
+
+       lzms_end_encode_item(ctx, length);
 }
 
-static struct lzms_match
-lzms_get_best_match(struct lzms_compressor *ctx)
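+/* Callback passed to lz_analyze_block() by lzms_fast_encode(): encode a
+ * literal.  */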
+static void
+lzms_record_literal(u8 literal, void *_ctx)
 {
-       struct lzms_match match;
+       struct lzms_compressor *ctx = _ctx;
 
-       /* TODO */
+       lzms_encode_literal(ctx, literal);
+}
+
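+/* Callback passed to lz_analyze_block() by lzms_fast_encode(): encode an LZ
+ * match.  */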
+static void
+lzms_record_match(unsigned length, unsigned offset, void *_ctx)
+{
+       struct lzms_compressor *ctx = _ctx;
+
+       lzms_encode_lz_match(ctx, length, offset);
+}
+
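+/* Fast compression: greedy/lazy parsing using the hash-based match-finder from
+ * lz_hash.h.  */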
+static void
+lzms_fast_encode(struct lzms_compressor *ctx)
+{
+       static const struct lz_params lzms_lz_params = {
+               .min_match      = 3,
+               .max_match      = UINT_MAX,
+               .max_offset     = UINT_MAX,
+               .nice_match     = 64,
+               .good_match     = 32,
+               .max_chain_len  = 64,
+               .max_lazy_match = 258,
+               .too_far        = 4096,
+       };
+
+       lz_analyze_block(ctx->window,
+                        ctx->window_size,
+                        lzms_record_match,
+                        lzms_record_literal,
+                        ctx,
+                        &lzms_lz_params,
+                        ctx->prev_tab);
+}
+
+/* Fast heuristic cost evaluation to use in the inner loop of the match-finder.
+ * Unlike lzms_get_match_cost(), which does a true cost evaluation, this simply
+ * prioritizes matches based on their offset: an offset present in the LRU
+ * queue costs just its queue index, while any other offset costs its full
+ * value, so repeat offsets and short offsets are preferred.  */
+static input_idx_t
+lzms_match_cost_fast(input_idx_t length, input_idx_t offset, const void *_lru)
+{
+       const struct lzms_lz_lru_queues *lru = _lru;
+
+       for (input_idx_t i = 0; i < LZMS_NUM_RECENT_OFFSETS; i++)
+               if (offset == lru->recent_offsets[i])
+                       return i;
+
+       return offset;
+}
+
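+/* Estimated cost of encoding @bit with the range encoder @enc, given the
+ * adaptive state *@cur_state; *@cur_state is advanced as if the bit were
+ * encoded.  (The cost calculation itself is still a TODO placeholder.)  */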
+static u32
+lzms_rc_bit_cost(const struct lzms_range_encoder *enc, u8 *cur_state, int bit)
+{
+       u32 prob;
+       u32 cost;
+
+       prob = enc->prob_entries[*cur_state & enc->mask].num_recent_zero_bits;
+       if (prob == 0)
+               prob = 1;
+       else if (prob == LZMS_PROBABILITY_MAX)
+               prob = LZMS_PROBABILITY_MAX - 1;
+
+       if (bit == 0)
+               prob = LZMS_PROBABILITY_MAX - prob;
+
+       cost = prob * 2; /* TODO */
+
+       *cur_state = (*cur_state << 1) | bit;
+
+       return cost;
+}
+
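+/* Costs are measured in units of 1/LZMS_COST_SCALE bits: a Huffman codeword
+ * of length N bits contributes N * LZMS_COST_SCALE to the cost.  */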
+#define LZMS_COST_SCALE 64
+
+static u32
+lzms_huffman_symbol_cost(const struct lzms_huffman_encoder *enc, u32 sym)
+{
+       return enc->lens[sym] * LZMS_COST_SCALE;
+}
+
+static u32
+lzms_value_cost(const struct lzms_huffman_encoder *enc, u32 value)
+{
+       u32 slot;
+       u32 num_extra_bits;
+       u32 cost = 0;
+
+       slot = lzms_get_slot(value, enc->slot_base_tab, enc->num_syms);
+
+       cost += lzms_huffman_symbol_cost(enc, slot);
+
+       num_extra_bits = bsr32(enc->slot_base_tab[slot + 1] -
+                              enc->slot_base_tab[slot]);
+
+       cost += num_extra_bits * LZMS_COST_SCALE;
+
+       return cost;
+}
+
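+/* Return, in *@matches_ret, the matches found by the suffix array match-finder
+ * at the current window position, sorted by strictly decreasing length.  */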
+static u32
+lzms_get_matches(struct lzms_compressor *ctx,
+                const struct lzms_adaptive_state *cost_state,
+                struct raw_match **matches_ret)
+{
+       u32 num_matches;
+       struct raw_match *matches = ctx->matches;
+
+       num_matches = lz_sarray_get_matches(&ctx->lz_sarray,
+                                           matches,
+                                           lzms_match_cost_fast,
+                                           &cost_state->lru);
+
+#ifdef ENABLE_LZMS_DEBUG
+       u32 curpos = lz_sarray_get_pos(&ctx->lz_sarray) - 1;
+       LZMS_ASSERT(curpos >= 0);
+       for (u32 i = 0; i < num_matches; i++) {
+               LZMS_ASSERT(matches[i].len <= ctx->window_size - curpos);
+               LZMS_ASSERT(matches[i].offset > 0);
+               LZMS_ASSERT(matches[i].offset <= curpos);
+               LZMS_ASSERT(!memcmp(&ctx->window[curpos],
+                                   &ctx->window[curpos - matches[i].offset],
+                                   matches[i].len));
+               if (i > 0)
+                       LZMS_ASSERT(matches[i - 1].len > matches[i].len);
 
-       match.length = 0;
+       }
+#endif
 
-       return match;
+       *matches_ret = matches;
+       return num_matches;
+}
+
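+/* Advance the match-finder past the next @n positions of the window without
+ * searching for matches at them.  */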
+static void
+lzms_skip_bytes(struct lzms_compressor *ctx, input_idx_t n)
+{
+       while (n--)
+               lz_sarray_skip_position(&ctx->lz_sarray);
+}
+
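+/* Return the estimated cost of encoding a literal at the previous window
+ * position, updating @cost_state accordingly.  */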
+static u32
+lzms_get_prev_literal_cost(struct lzms_compressor *ctx,
+                          struct lzms_adaptive_state *cost_state)
+{
+       u8 literal = ctx->window[lz_sarray_get_pos(&ctx->lz_sarray) - 1];
+       u32 cost = 0;
+
+       cost_state->lru.upcoming_offset = 0;
+       lzms_update_lz_lru_queues(&cost_state->lru);
+
+       cost += lzms_rc_bit_cost(&ctx->main_range_encoder,
+                                &cost_state->main_state, 0);
+       cost += lzms_huffman_symbol_cost(&ctx->literal_encoder, literal);
+
+       return cost;
+}
+
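+/* Return the estimated cost of encoding an LZ match with the specified length
+ * and offset, updating @cost_state accordingly.  */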
+static u32
+lzms_get_match_cost(struct lzms_compressor *ctx,
+                   struct lzms_adaptive_state *cost_state,
+                   input_idx_t length, input_idx_t offset)
+{
+       u32 cost = 0;
+       int recent_offset_idx;
+
+       cost += lzms_rc_bit_cost(&ctx->main_range_encoder,
+                                &cost_state->main_state, 1);
+       cost += lzms_rc_bit_cost(&ctx->match_range_encoder,
+                                &cost_state->match_state, 0);
+
+       for (recent_offset_idx = 0;
+            recent_offset_idx < LZMS_NUM_RECENT_OFFSETS;
+            recent_offset_idx++)
+               if (offset == cost_state->lru.recent_offsets[recent_offset_idx])
+                       break;
+
+       if (recent_offset_idx == LZMS_NUM_RECENT_OFFSETS) {
+               /* Explicit offset.  */
+               cost += lzms_rc_bit_cost(&ctx->lz_match_range_encoder,
+                                        &cost_state->lz_match_state, 0);
+
+               cost += lzms_value_cost(&ctx->lz_offset_encoder, offset);
+       } else {
+               int i;
+
+               /* Repeat offset.  */
+               cost += lzms_rc_bit_cost(&ctx->lz_match_range_encoder,
+                                        &cost_state->lz_match_state, 1);
+
+               for (i = 0; i < recent_offset_idx; i++)
+                       cost++; /* TODO */
+
+               if (i < LZMS_NUM_RECENT_OFFSETS - 1)
+                       cost++; /* TODO */
+
+               /* Initial update of the LZ match offset LRU queue.  */
+               for (; i < LZMS_NUM_RECENT_OFFSETS; i++)
+                       cost_state->lru.recent_offsets[i] = cost_state->lru.recent_offsets[i + 1];
+       }
+
+       cost += lzms_value_cost(&ctx->length_encoder, length);
+
+       cost_state->lru.upcoming_offset = offset;
+       lzms_update_lz_lru_queues(&cost_state->lru);
+
+       return cost;
+}
+
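+/* Choose the match or literal to encode at the current position, using the
+ * generic near-optimal parser from lz_optimal.h with the LZMS-specific cost
+ * functions defined above.  */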
+static struct raw_match
+lzms_get_near_optimal_match(struct lzms_compressor *ctx)
+{
+       struct lzms_adaptive_state initial_state = {
+               .lru = ctx->lru.lz,
+               .main_state = ctx->main_range_encoder.state,
+               .match_state = ctx->match_range_encoder.state,
+               .lz_match_state = ctx->lz_match_range_encoder.state,
+       };
+       return lz_get_near_optimal_match(&ctx->mc,
+                                        lzms_get_matches,
+                                        lzms_skip_bytes,
+                                        lzms_get_prev_literal_cost,
+                                        lzms_get_match_cost,
+                                        ctx,
+                                        &initial_state);
+}
+
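+/* "Slow" compression: near-optimal parsing using the suffix array match-finder
+ * and the match-chooser from lz_optimal.h.  */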
+static void
+lzms_slow_encode(struct lzms_compressor *ctx)
+{
+       struct raw_match match;
+
+       /* Load window into suffix array match-finder.  */
+       lz_sarray_load_window(&ctx->lz_sarray, ctx->window, ctx->window_size);
+
+       /* Reset the match-chooser.  */
+       lz_match_chooser_begin(&ctx->mc);
+
+       /* TODO */
+       while (ctx->cur_window_pos != ctx->window_size) {
+
+               match = lzms_get_near_optimal_match(ctx);
+               if (match.len <= 1) {
+                       /* Literal  */
+                       lzms_encode_literal(ctx, ctx->window[ctx->cur_window_pos]);
+               } else {
+                       /* LZ match  */
+                       lzms_encode_lz_match(ctx, match.len, match.offset);
+               }
+       }
 }
 
 static void
@@ -602,6 +880,7 @@ lzms_init_compressor(struct lzms_compressor *ctx, const u8 *udata, u32 ulen,
 
        /* Copy the uncompressed data into the @ctx->window buffer.  */
        memcpy(ctx->window, udata, ulen);
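+       /* Zero the 8 bytes of padding past the end of the data (the window
+        * buffer is allocated with 8 extra bytes).  */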
+       memset(&ctx->window[ulen], 0, 8);
        ctx->cur_window_pos = 0;
        ctx->window_size = ulen;
 
@@ -665,20 +944,8 @@ lzms_init_compressor(struct lzms_compressor *ctx, const u8 *udata, u32 ulen,
                lzms_init_range_encoder(&ctx->delta_repeat_match_range_encoders[i],
                                        &ctx->rc, LZMS_NUM_DELTA_REPEAT_MATCH_STATES);
 
-       /* Initialize the LRU queue for recent match offsets.  */
-       for (size_t i = 0; i < LZMS_NUM_RECENT_OFFSETS + 1; i++)
-               ctx->recent_lz_offsets[i] = i + 1;
-
-       for (size_t i = 0; i < LZMS_NUM_RECENT_OFFSETS + 1; i++) {
-               ctx->recent_delta_powers[i] = 0;
-               ctx->recent_delta_offsets[i] = i + 1;
-       }
-       ctx->prev_lz_offset = 0;
-       ctx->prev_delta_offset = 0;
-       ctx->prev_delta_power = 0;
-       ctx->upcoming_lz_offset = 0;
-       ctx->upcoming_delta_offset = 0;
-       ctx->upcoming_delta_power = 0;
+       /* Initialize LRU match information.  */
+       lzms_init_lru_queues(&ctx->lru);
 }
 
 /* Flush the output streams, prepare the final compressed data, and return its
@@ -733,7 +1000,6 @@ lzms_compress(const void *uncompressed_data, size_t uncompressed_size,
              void *compressed_data, size_t compressed_size_avail, void *_ctx)
 {
        struct lzms_compressor *ctx = _ctx;
-       struct lzms_match match;
        size_t compressed_size;
 
        LZMS_DEBUG("uncompressed_size=%zu, compressed_size_avail=%zu",
@@ -769,18 +1035,10 @@ lzms_compress(const void *uncompressed_data, size_t uncompressed_size,
 
        /* Determine and output a literal/match sequence that decompresses to
         * the preprocessed data.  */
-       while (ctx->cur_window_pos != ctx->window_size) {
-               match = lzms_get_best_match(ctx);
-               if (match.length == 0) {
-                       /* Literal  */
-                       lzms_encode_literal(ctx, ctx->window[ctx->cur_window_pos]);
-                       ctx->cur_window_pos++;
-               } else {
-                       /* LZ match  */
-                       lzms_encode_lz_match(ctx, match.length, match.offset);
-                       ctx->cur_window_pos += match.length;
-               }
-       }
+       if (1)
+               lzms_slow_encode(ctx);
+       else
+               lzms_fast_encode(ctx);
 
        /* Get and return the compressed data size.  */
        compressed_size = lzms_finalize(ctx, compressed_data,
@@ -816,14 +1074,14 @@ lzms_compress(const void *uncompressed_data, size_t uncompressed_size,
 
                        if (ret) {
                                ERROR("Failed to decompress data we "
-                                     "compressed using LZMN algorithm");
+                                     "compressed using LZMS algorithm");
                                wimlib_assert(0);
                                return 0;
                        }
                        if (memcmp(uncompressed_data, ctx->window,
                                   uncompressed_size))
                        {
-                               ERROR("Data we compressed using LZMN algorithm "
+                               ERROR("Data we compressed using LZMS algorithm "
                                      "didn't decompress to original");
                                wimlib_assert(0);
                                return 0;
@@ -845,6 +1103,9 @@ lzms_free_compressor(void *_ctx)
 
        if (ctx) {
                FREE(ctx->window);
+               FREE(ctx->prev_tab);
+               lz_sarray_destroy(&ctx->lz_sarray);
+               lz_match_chooser_destroy(&ctx->mc);
                FREE(ctx);
        }
 }
@@ -865,9 +1126,28 @@ lzms_create_compressor(size_t max_block_size,
        if (ctx == NULL)
                goto oom;
 
-       ctx->window = MALLOC(max_block_size);
+       ctx->window = MALLOC(max_block_size + 8);
        if (ctx->window == NULL)
                goto oom;
+
+       ctx->prev_tab = MALLOC(max_block_size * sizeof(ctx->prev_tab[0]));
+       if (ctx->prev_tab == NULL)
+               goto oom;
+
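+       /* Initialize the suffix array match-finder.  */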
+       if (!lz_sarray_init(&ctx->lz_sarray,
+                           max_block_size,
+                           2,
+                           max_block_size,
+                           100,
+                           10))
+               goto oom;
+
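+       /* Initialize the match-chooser used for near-optimal parsing.  */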
+       if (!lz_match_chooser_init(&ctx->mc,
+                                  LZMS_OPTIM_ARRAY_SIZE,
+                                  32,
+                                  max_block_size))
+               goto oom;
+
        ctx->max_block_size = max_block_size;
 
        *ctx_ret = ctx;