X-Git-Url: https://wimlib.net/git/?p=wimlib;a=blobdiff_plain;f=src%2Flzms-compress.c;h=2c9356d9b1a538006a7fc9dcae90404bc2c271a2;hp=6df108061e8518d22a3d2e238d8d1d6484e98b86;hb=ee4fcdd5c4924803ae67a09fecac7d6b4b8ead6e;hpb=62e209e9aeaa36ba9e3c2a174428805b7264e0e7 diff --git a/src/lzms-compress.c b/src/lzms-compress.c index 6df10806..2c9356d9 100644 --- a/src/lzms-compress.c +++ b/src/lzms-compress.c @@ -24,8 +24,7 @@ /* This a compressor for the LZMS compression format. More details about this * format can be found in lzms-decompress.c. * - * This is currently an unsophisticated implementation that is fast but does not - * attain the best compression ratios allowed by the format. + * NOTE: this compressor currently does not code any delta matches. */ #ifdef HAVE_CONFIG_H @@ -39,7 +38,6 @@ #include "wimlib/compress_common.h" #include "wimlib/endianness.h" #include "wimlib/error.h" -#include "wimlib/lz_hash.h" #include "wimlib/lz_sarray.h" #include "wimlib/lzms.h" #include "wimlib/util.h" @@ -48,8 +46,6 @@ #include #include -#define LZMS_OPTIM_ARRAY_SIZE 1024 - struct lzms_compressor; struct lzms_adaptive_state { struct lzms_lz_lru_queues lru; @@ -128,7 +124,7 @@ struct lzms_range_encoder { * lzms_range_encoder_raw. */ struct lzms_range_encoder_raw *rc; - /* Bits recently encoded by this range encoder. This are used as in + /* Bits recently encoded by this range encoder. This is used as an * index into @prob_entries. */ u32 state; @@ -140,9 +136,7 @@ struct lzms_range_encoder { struct lzms_probability_entry prob_entries[LZMS_MAX_NUM_STATES]; }; -/* Structure used for Huffman encoding, optionally encoding larger "values" as a - * Huffman symbol specifying a slot and a slot-dependent number of extra bits. - * */ +/* Structure used for Huffman encoding. */ struct lzms_huffman_encoder { /* Bitstream to write Huffman-encoded symbols and verbatim bits to. @@ -150,9 +144,6 @@ struct lzms_huffman_encoder { */ struct lzms_output_bitstream *os; - /* Pointer to the slot base table to use. */ - const u32 *slot_base_tab; - /* Number of symbols that have been written using this code far. Reset * to 0 whenever the code is rebuilt. */ u32 num_syms_written; @@ -172,7 +163,7 @@ struct lzms_huffman_encoder { u8 lens[LZMS_MAX_NUM_SYMS]; /* The codeword of each symbol in the Huffman code. */ - u16 codewords[LZMS_MAX_NUM_SYMS]; + u32 codewords[LZMS_MAX_NUM_SYMS]; }; /* State of the LZMS compressor. */ @@ -186,16 +177,11 @@ struct lzms_compressor { /* Size of the data in @buffer. */ u32 window_size; -#if 0 - /* Temporary array used by lz_analyze_block(); must be at least as long - * as the window. */ - u32 *prev_tab; -#endif - /* Suffix array match-finder. */ struct lz_sarray lz_sarray; - struct raw_match matches[64]; + /* Temporary space to store found matches. */ + struct raw_match *matches; /* Match-chooser. */ struct lz_match_chooser mc; @@ -460,29 +446,36 @@ lzms_huffman_encode_symbol(struct lzms_huffman_encoder *enc, u32 sym) } } -/* Encode a number as a Huffman symbol specifying a slot, plus a number of - * slot-dependent extra bits. 
*/ static void -lzms_encode_value(struct lzms_huffman_encoder *enc, u32 value) +lzms_encode_length(struct lzms_huffman_encoder *enc, u32 length) { unsigned slot; unsigned num_extra_bits; u32 extra_bits; - LZMS_ASSERT(enc->slot_base_tab != NULL); + slot = lzms_get_length_slot(length); - slot = lzms_get_slot(value, enc->slot_base_tab, enc->num_syms); + num_extra_bits = lzms_extra_length_bits[slot]; - /* Get the number of extra bits needed to represent the range of values - * that share the slot. */ - num_extra_bits = bsr32(enc->slot_base_tab[slot + 1] - - enc->slot_base_tab[slot]); + extra_bits = length - lzms_length_slot_base[slot]; - /* Calculate the extra bits as the offset from the slot base. */ - extra_bits = value - enc->slot_base_tab[slot]; + lzms_huffman_encode_symbol(enc, slot); + lzms_output_bitstream_put_bits(enc->os, extra_bits, num_extra_bits); +} + +static void +lzms_encode_offset(struct lzms_huffman_encoder *enc, u32 offset) +{ + unsigned slot; + unsigned num_extra_bits; + u32 extra_bits; + + slot = lzms_get_position_slot(offset); + + num_extra_bits = lzms_extra_position_bits[slot]; + + extra_bits = offset - lzms_position_slot_base[slot]; - /* Output the slot (Huffman-encoded), then the extra bits (verbatim). - */ lzms_huffman_encode_symbol(enc, slot); lzms_output_bitstream_put_bits(enc->os, extra_bits, num_extra_bits); } @@ -541,7 +534,7 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset) /* Main bit: 1 = a match, not a literal. */ lzms_range_encode_bit(&ctx->main_range_encoder, 1); - /* Match bit: 0 = a LZ match, not a delta match. */ + /* Match bit: 0 = an LZ match, not a delta match. */ lzms_range_encode_bit(&ctx->match_range_encoder, 0); /* Determine if the offset can be represented as a recent offset. */ @@ -558,7 +551,7 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset) lzms_range_encode_bit(&ctx->lz_match_range_encoder, 0); /* Encode the match offset. */ - lzms_encode_value(&ctx->lz_offset_encoder, offset); + lzms_encode_offset(&ctx->lz_offset_encoder, offset); } else { int i; @@ -583,7 +576,7 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset) } /* Encode the match length. */ - lzms_encode_value(&ctx->length_encoder, length); + lzms_encode_length(&ctx->length_encoder, length); /* Save the match offset for later insertion at the front of the LZ * match offset LRU queue. */ @@ -592,48 +585,6 @@ lzms_encode_lz_match(struct lzms_compressor *ctx, u32 length, u32 offset) lzms_end_encode_item(ctx, length); } -#if 0 -static void -lzms_record_literal(u8 literal, void *_ctx) -{ - struct lzms_compressor *ctx = _ctx; - - lzms_encode_literal(ctx, literal); -} - -static void -lzms_record_match(unsigned length, unsigned offset, void *_ctx) -{ - struct lzms_compressor *ctx = _ctx; - - lzms_encode_lz_match(ctx, length, offset); -} - -static void -lzms_fast_encode(struct lzms_compressor *ctx) -{ - static const struct lz_params lzms_lz_params = { - .min_match = 3, - .max_match = UINT_MAX, - .max_offset = UINT_MAX, - .nice_match = 64, - .good_match = 32, - .max_chain_len = 64, - .max_lazy_match = 258, - .too_far = 4096, - }; - - lz_analyze_block(ctx->window, - ctx->window_size, - lzms_record_match, - lzms_record_literal, - ctx, - &lzms_lz_params, - ctx->prev_tab); - -} -#endif - /* Fast heuristic cost evaluation to use in the inner loop of the match-finder. * Unlike lzms_get_lz_match_cost(), which does a true cost evaluation, this * simply prioritize matches based on their offset. 
*/ @@ -654,7 +605,7 @@ lzms_lz_match_cost_fast(input_idx_t length, input_idx_t offset, const void *_lru /*#define LZMS_RC_COSTS_USE_FLOATING_POINT*/ static u32 -lzms_rc_costs[LZMS_PROBABILITY_MAX]; +lzms_rc_costs[LZMS_PROBABILITY_MAX + 1]; #ifdef LZMS_RC_COSTS_USE_FLOATING_POINT # include @@ -684,7 +635,7 @@ lzms_do_init_rc_costs(void) * really interpreted as 1 / 64 and 64 / 64 is really interpreted as * 63 / 64. */ - for (u32 i = 0; i < LZMS_PROBABILITY_MAX; i++) { + for (u32 i = 0; i <= LZMS_PROBABILITY_MAX; i++) { u32 prob = i; if (prob == 0) @@ -715,17 +666,9 @@ lzms_do_init_rc_costs(void) static void lzms_init_rc_costs(void) { - static bool done = false; - static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER; - - if (!done) { - pthread_mutex_lock(&mutex); - if (!done) { - lzms_do_init_rc_costs(); - done = true; - } - pthread_mutex_unlock(&mutex); - } + static pthread_once_t once = PTHREAD_ONCE_INIT; + + pthread_once(&once, lzms_do_init_rc_costs); } /* @@ -747,11 +690,6 @@ lzms_rc_bit_cost(const struct lzms_range_encoder *enc, u8 *cur_state, int bit) *cur_state = (*cur_state << 1) | bit; - if (prob_zero == 0) - prob_zero = 1; - else if (prob_zero == LZMS_PROBABILITY_MAX) - prob_zero = LZMS_PROBABILITY_MAX - 1; - if (bit == 0) prob_correct = prob_zero; else @@ -766,20 +704,36 @@ lzms_huffman_symbol_cost(const struct lzms_huffman_encoder *enc, u32 sym) return enc->lens[sym] << LZMS_COST_SHIFT; } -/* Compute the cost to encode a number with lzms_encode_value(). */ static u32 -lzms_value_cost(const struct lzms_huffman_encoder *enc, u32 value) +lzms_offset_cost(const struct lzms_huffman_encoder *enc, u32 offset) +{ + u32 slot; + u32 num_extra_bits; + u32 cost = 0; + + slot = lzms_get_position_slot(offset); + + cost += lzms_huffman_symbol_cost(enc, slot); + + num_extra_bits = lzms_extra_position_bits[slot]; + + cost += num_extra_bits << LZMS_COST_SHIFT; + + return cost; +} + +static u32 +lzms_length_cost(const struct lzms_huffman_encoder *enc, u32 length) { u32 slot; u32 num_extra_bits; u32 cost = 0; - slot = lzms_get_slot(value, enc->slot_base_tab, enc->num_syms); + slot = lzms_get_length_slot(length); cost += lzms_huffman_symbol_cost(enc, slot); - num_extra_bits = bsr32(enc->slot_base_tab[slot + 1] - - enc->slot_base_tab[slot]); + num_extra_bits = lzms_extra_length_bits[slot]; cost += num_extra_bits << LZMS_COST_SHIFT; @@ -791,37 +745,11 @@ lzms_get_matches(struct lzms_compressor *ctx, const struct lzms_adaptive_state *state, struct raw_match **matches_ret) { - u32 num_matches; - struct raw_match *matches = ctx->matches; - - num_matches = lz_sarray_get_matches(&ctx->lz_sarray, - matches, - lzms_lz_match_cost_fast, - &state->lru); -#if 0 - fprintf(stderr, "Pos %u: %u matches\n", - lz_sarray_get_pos(&ctx->lz_sarray) - 1, num_matches); - for (u32 i = 0; i < num_matches; i++) - fprintf(stderr, "\tLen %u Offset %u\n", matches[i].len, matches[i].offset); -#endif - -#ifdef ENABLE_LZMS_DEBUG - LZMS_ASSERT(lz_sarray_get_pos(&ctx->lz_sarray) > 0); - u32 curpos = lz_sarray_get_pos(&ctx->lz_sarray) - 1; - for (u32 i = 0; i < num_matches; i++) { - LZMS_ASSERT(matches[i].len <= ctx->window_size - curpos); - LZMS_ASSERT(matches[i].offset > 0); - LZMS_ASSERT(matches[i].offset <= curpos); - LZMS_ASSERT(!memcmp(&ctx->window[curpos], - &ctx->window[curpos - matches[i].offset], - matches[i].len)); - if (i < num_matches - 1) - LZMS_ASSERT(matches[i].len > matches[i + 1].len); - - } -#endif - *matches_ret = matches; - return num_matches; + *matches_ret = ctx->matches; + return 
lz_sarray_get_matches(&ctx->lz_sarray, + ctx->matches, + lzms_lz_match_cost_fast, + &state->lru); } static void @@ -873,7 +801,7 @@ lzms_get_lz_match_cost(struct lzms_compressor *ctx, cost += lzms_rc_bit_cost(&ctx->lz_match_range_encoder, &state->lz_match_state, 0); - cost += lzms_value_cost(&ctx->lz_offset_encoder, offset); + cost += lzms_offset_cost(&ctx->lz_offset_encoder, offset); } else { int i; @@ -895,7 +823,7 @@ lzms_get_lz_match_cost(struct lzms_compressor *ctx, state->lru.recent_offsets[i] = state->lru.recent_offsets[i + 1]; } - cost += lzms_value_cost(&ctx->length_encoder, length); + cost += lzms_length_cost(&ctx->length_encoder, length); state->lru.upcoming_offset = offset; lzms_update_lz_lru_queues(&state->lru); @@ -940,12 +868,13 @@ lzms_get_near_optimal_match(struct lzms_compressor *ctx) * - The costs of literals and matches are estimated using the range encoder * states and the semi-adaptive Huffman codes. Except for range encoding * states, costs are assumed to be constant throughout a single run of the - * parsing algorithm, which can parse up to LZMS_OPTIM_ARRAY_SIZE bytes of - * data. This introduces a source of inaccuracy because the probabilities and - * Huffman codes can change over this part of the data. + * parsing algorithm, which can parse up to @optim_array_length (from the + * `struct wimlib_lzms_compressor_params') bytes of data. This introduces a + * source of inaccuracy because the probabilities and Huffman codes can change + * over this part of the data. */ static void -lzms_normal_encode(struct lzms_compressor *ctx) +lzms_encode(struct lzms_compressor *ctx) { struct raw_match match; @@ -980,12 +909,10 @@ lzms_init_range_encoder(struct lzms_range_encoder *enc, static void lzms_init_huffman_encoder(struct lzms_huffman_encoder *enc, struct lzms_output_bitstream *os, - const u32 *slot_base_tab, unsigned num_syms, unsigned rebuild_freq) { enc->os = os; - enc->slot_base_tab = slot_base_tab; enc->num_syms_written = 0; enc->rebuild_freq = rebuild_freq; enc->num_syms = num_syms; @@ -1008,7 +935,6 @@ lzms_init_compressor(struct lzms_compressor *ctx, const u8 *udata, u32 ulen, /* Copy the uncompressed data into the @ctx->window buffer. */ memcpy(ctx->window, udata, ulen); - memset(&ctx->window[ulen], 0, 8); ctx->cur_window_pos = 0; ctx->window_size = ulen; @@ -1028,23 +954,23 @@ lzms_init_compressor(struct lzms_compressor *ctx, const u8 *udata, u32 ulen, /* Initialize Huffman encoders for each alphabet used in the compressed * representation. 
*/ lzms_init_huffman_encoder(&ctx->literal_encoder, &ctx->os, - NULL, LZMS_NUM_LITERAL_SYMS, + LZMS_NUM_LITERAL_SYMS, LZMS_LITERAL_CODE_REBUILD_FREQ); lzms_init_huffman_encoder(&ctx->lz_offset_encoder, &ctx->os, - lzms_position_slot_base, num_position_slots, + num_position_slots, LZMS_LZ_OFFSET_CODE_REBUILD_FREQ); lzms_init_huffman_encoder(&ctx->length_encoder, &ctx->os, - lzms_length_slot_base, LZMS_NUM_LEN_SYMS, + LZMS_NUM_LEN_SYMS, LZMS_LENGTH_CODE_REBUILD_FREQ); lzms_init_huffman_encoder(&ctx->delta_offset_encoder, &ctx->os, - lzms_position_slot_base, num_position_slots, + num_position_slots, LZMS_DELTA_OFFSET_CODE_REBUILD_FREQ); lzms_init_huffman_encoder(&ctx->delta_power_encoder, &ctx->os, - NULL, LZMS_NUM_DELTA_POWER_SYMS, + LZMS_NUM_DELTA_POWER_SYMS, LZMS_DELTA_POWER_CODE_REBUILD_FREQ); /* Initialize range encoders, all of which wrap around the same @@ -1070,7 +996,7 @@ lzms_init_compressor(struct lzms_compressor *ctx, const u8 *udata, u32 ulen, &ctx->rc, LZMS_NUM_DELTA_REPEAT_MATCH_STATES); /* Initialize LRU match information. */ - lzms_init_lru_queues(&ctx->lru); + lzms_init_lru_queues(&ctx->lru); } /* Flush the output streams, prepare the final compressed data, and return its @@ -1162,10 +1088,7 @@ lzms_compress(const void *uncompressed_data, size_t uncompressed_size, /* Compute and encode a literal/match sequence that decompresses to the * preprocessed data. */ - lzms_normal_encode(ctx); -#if 0 - lzms_fast_encode(ctx); -#endif + lzms_encode(ctx); /* Get and return the compressed data size. */ compressed_size = lzms_finalize(ctx, compressed_data, @@ -1230,17 +1153,17 @@ lzms_free_compressor(void *_ctx) if (ctx) { FREE(ctx->window); -#if 0 - FREE(ctx->prev_tab); -#endif + FREE(ctx->matches); lz_sarray_destroy(&ctx->lz_sarray); lz_match_chooser_destroy(&ctx->mc); FREE(ctx); } } -static const struct wimlib_lzms_compressor_params default_params = { - .hdr = sizeof(struct wimlib_lzms_compressor_params), +static const struct wimlib_lzms_compressor_params lzms_default = { + .hdr = { + .size = sizeof(struct wimlib_lzms_compressor_params), + }, .min_match_length = 2, .max_match_length = UINT32_MAX, .nice_match_length = 32, @@ -1249,49 +1172,54 @@ static const struct wimlib_lzms_compressor_params default_params = { .optim_array_length = 1024, }; +static bool +lzms_params_valid(const struct wimlib_compressor_params_header *); + +static const struct wimlib_lzms_compressor_params * +lzms_get_params(const struct wimlib_compressor_params_header *_params) +{ + const struct wimlib_lzms_compressor_params *params = + (const struct wimlib_lzms_compressor_params*)_params; + + if (params == NULL) + params = &lzms_default; + + LZMS_ASSERT(lzms_params_valid(¶ms->hdr)); + + return params; +} + static int lzms_create_compressor(size_t max_block_size, const struct wimlib_compressor_params_header *_params, void **ctx_ret) { struct lzms_compressor *ctx; - const struct wimlib_lzms_compressor_params *params; + const struct wimlib_lzms_compressor_params *params = lzms_get_params(_params); if (max_block_size == 0 || max_block_size >= INT32_MAX) { LZMS_DEBUG("Invalid max_block_size (%u)", max_block_size); return WIMLIB_ERR_INVALID_PARAM; } - if (_params) - params = (const struct wimlib_lzms_compressor_params*)_params; - else - params = &default_params; - - if (params->max_match_length < params->min_match_length || - params->min_match_length < 2 || - params->optim_array_length == 0 || - min(params->max_match_length, params->nice_match_length) > 65536) { - LZMS_DEBUG("Invalid compression parameter!"); - return 
WIMLIB_ERR_INVALID_PARAM; - } - ctx = CALLOC(1, sizeof(struct lzms_compressor)); if (ctx == NULL) goto oom; - ctx->window = MALLOC(max_block_size + 8); + ctx->window = MALLOC(max_block_size); if (ctx->window == NULL) goto oom; -#if 0 - ctx->prev_tab = MALLOC(max_block_size * sizeof(ctx->prev_tab[0])); - if (ctx->prev_tab == NULL) + ctx->matches = MALLOC(min(params->max_match_length - + params->min_match_length + 1, + params->max_matches_per_pos) * + sizeof(ctx->matches[0])); + if (ctx->matches == NULL) goto oom; -#endif if (!lz_sarray_init(&ctx->lz_sarray, max_block_size, params->min_match_length, - params->max_match_length, + min(params->max_match_length, LZ_SARRAY_LEN_MAX), params->max_search_depth, params->max_matches_per_pos)) goto oom; @@ -1302,8 +1230,8 @@ lzms_create_compressor(size_t max_block_size, params->max_match_length)) goto oom; - /* Initialize position and length slot bases if not done already. */ - lzms_init_slot_bases(); + /* Initialize position and length slot data if not done already. */ + lzms_init_slots(); /* Initialize range encoding cost table if not done already. */ lzms_init_rc_costs(); @@ -1318,7 +1246,46 @@ oom: return WIMLIB_ERR_NOMEM; } +static u64 +lzms_get_needed_memory(size_t max_block_size, + const struct wimlib_compressor_params_header *_params) +{ + const struct wimlib_lzms_compressor_params *params = lzms_get_params(_params); + + u64 size = 0; + + size += max_block_size; + size += sizeof(struct lzms_compressor); + size += lz_sarray_get_needed_memory(max_block_size); + size += lz_match_chooser_get_needed_memory(params->optim_array_length, + params->nice_match_length, + params->max_match_length); + size += min(params->max_match_length - + params->min_match_length + 1, + params->max_matches_per_pos) * + sizeof(((struct lzms_compressor*)0)->matches[0]); + return size; +} + +static bool +lzms_params_valid(const struct wimlib_compressor_params_header *_params) +{ + const struct wimlib_lzms_compressor_params *params = + (const struct wimlib_lzms_compressor_params*)_params; + + if (params->hdr.size != sizeof(*params) || + params->max_match_length < params->min_match_length || + params->min_match_length < 2 || + params->optim_array_length == 0 || + min(params->max_match_length, params->nice_match_length) > 65536) + return false; + + return true; +} + const struct compressor_ops lzms_compressor_ops = { + .params_valid = lzms_params_valid, + .get_needed_memory = lzms_get_needed_memory, .create_compressor = lzms_create_compressor, .compress = lzms_compress, .free_compressor = lzms_free_compressor,
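
For reference, the new lzms_encode_length() and lzms_encode_offset() helpers in this patch code a value as a Huffman symbol naming a "slot" plus a slot-dependent number of verbatim extra bits giving the offset from the slot base. The standalone sketch below illustrates that scheme only; the table, the DEMO_* names and demo_encode_value() are hypothetical, whereas the real encoder uses the lzms_length_slot_base[], lzms_extra_length_bits[], lzms_position_slot_base[] and lzms_extra_position_bits[] tables and the lzms_get_length_slot()/lzms_get_position_slot() lookups referenced in the diff.

/*
 * Illustrative sketch only -- not part of the patch.  Slot i covers the
 * value range [demo_slot_base[i], demo_slot_base[i + 1]), and each slot
 * needs demo_extra_bits[i] verbatim bits to distinguish the values it
 * covers.  The numbers below are made up for the example.
 */
#include <stdint.h>
#include <stdio.h>

#define DEMO_NUM_SLOTS 4

static const uint32_t demo_slot_base[DEMO_NUM_SLOTS + 1] = { 1, 2, 4, 8, 16 };
static const unsigned demo_extra_bits[DEMO_NUM_SLOTS]    = { 0, 1, 2, 3 };

static void
demo_encode_value(uint32_t value)
{
	unsigned slot = 0;

	/* Find the last slot whose base does not exceed the value.  */
	while (slot + 1 < DEMO_NUM_SLOTS && demo_slot_base[slot + 1] <= value)
		slot++;

	/* The extra bits are the offset of the value from the slot base.  A
	 * real encoder would Huffman-encode @slot, then write @extra_bits as
	 * demo_extra_bits[slot] verbatim bits to the output bitstream.  */
	uint32_t extra_bits = value - demo_slot_base[slot];

	printf("value %u => slot %u + %u extra bit(s) (extra value %u)\n",
	       (unsigned)value, slot, demo_extra_bits[slot],
	       (unsigned)extra_bits);
}

int
main(void)
{
	for (uint32_t value = 1; value < 16; value++)
		demo_encode_value(value);
	return 0;
}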
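
The patch also replaces the hand-rolled double-checked locking in lzms_init_rc_costs() with pthread_once().  A minimal usage sketch of that primitive, with a hypothetical demo_do_init() standing in for lzms_do_init_rc_costs(), looks like this:

#include <pthread.h>
#include <stdio.h>

/* Hypothetical one-time initializer for illustration only.  */
static void
demo_do_init(void)
{
	printf("initialized exactly once\n");
}

static void
demo_init(void)
{
	static pthread_once_t once = PTHREAD_ONCE_INIT;

	/* pthread_once() guarantees demo_do_init() runs exactly once, even
	 * when several threads call demo_init() concurrently, and every
	 * caller returns only after initialization has completed.  */
	pthread_once(&once, demo_do_init);
}

int
main(void)
{
	demo_init();
	demo_init();	/* second call is a no-op */
	return 0;
}

Relative to the old pattern, this avoids the unsynchronized read of the `done' flag and drops the explicit mutex, while still providing thread-safe one-time initialization.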