X-Git-Url: https://wimlib.net/git/?p=wimlib;a=blobdiff_plain;f=src%2Flzms_decompress.c;h=e14ba590ac311dc074699470e27277f177bff178;hp=73d4486da81b73ca0dbf6dd86528309a0777506f;hb=a739a1c0e0d2091209544f8f155baa0df6935d6f;hpb=f18b7fc3361c4daac0ddd104af65a8eff8466fec diff --git a/src/lzms_decompress.c b/src/lzms_decompress.c index 73d4486d..e14ba590 100644 --- a/src/lzms_decompress.c +++ b/src/lzms_decompress.c @@ -5,7 +5,7 @@ */ /* - * Copyright (C) 2013, 2014 Eric Biggers + * Copyright (C) 2013, 2014, 2015 Eric Biggers * * This file is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the Free @@ -50,19 +50,67 @@ * matches or "delta" matches, either of which can have its offset encoded * explicitly or encoded via a reference to a recently used (repeat) offset. * - * A traditional LZ match consists of a length and offset; it asserts that the - * sequence of bytes beginning at the current position and extending for the - * length is exactly equal to the equal-length sequence of bytes at the offset - * back in the data buffer. On the other hand, a delta match consists of a - * length, raw offset, and power. It asserts that the sequence of bytes - * beginning at the current position and extending for the length is equal to - * the bytewise sum of the two equal-length sequences of bytes (2**power) and - * (raw_offset * 2**power) bytes before the current position, minus bytewise the - * sequence of bytes beginning at (2**power + raw_offset * 2**power) bytes - * before the current position. Although not generally as useful as traditional - * LZ matches, delta matches can be helpful on some types of data. Both LZ and - * delta matches may overlap with the current position; in fact, the minimum - * offset is 1, regardless of match length. + * A traditional LZ77 match consists of a length and offset. It asserts that + * the sequence of bytes beginning at the current position and extending for the + * length is equal to the same-length sequence of bytes at the offset back in + * the data buffer. This type of match can be visualized as follows, with the + * caveat that the sequences may overlap: + * + * offset + * -------------------- + * | | + * B[1...len] A[1...len] + * + * Decoding proceeds as follows: + * + * do { + * *A++ = *B++; + * } while (--length); + * + * On the other hand, a delta match consists of a "span" as well as a length and + * offset. A delta match can be visualized as follows, with the caveat that the + * various sequences may overlap: + * + * offset + * ----------------------------- + * | | + * span | span | + * ------------- ------------- + * | | | | + * D[1...len] C[1...len] B[1...len] A[1...len] + * + * Decoding proceeds as follows: + * + * do { + * *A++ = *B++ + *C++ - *D++; + * } while (--length); + * + * A delta match asserts that the bytewise differences of the A and B sequences + * are equal to the bytewise differences of the C and D sequences. The + * sequences within each pair are separated by the same number of bytes, the + * "span". The inter-pair distance is the "offset". In LZMS, spans are + * restricted to powers of 2 between 2**0 and 2**7 inclusively. Offsets are + * restricted to multiples of the span. The stored value for the offset is the + * "raw offset", which is the real offset divided by the span. + * + * Delta matches can cover data containing a series of power-of-2 sized integers + * that is linearly increasing or decreasing. 
Another way of thinking about it + * is that a delta match can match a longer sequence that is interrupted by a + * non-matching byte, provided that the non-matching byte is a continuation of a + * linearly changing pattern. Examples of files that may contain data like this + * are uncompressed bitmap images, uncompressed digital audio, and Unicode data + * tables. To some extent, this match type is a replacement for delta filters + * or multimedia filters that are sometimes used in other compression software + * (e.g. 'xz --delta --lzma2'). However, on most types of files, delta matches + * do not seem to be very useful. + * + * Both LZ and delta matches may use overlapping sequences. Therefore, they + * must be decoded as if only one byte is copied at a time. + * + * For both LZ and delta matches, any match length in [1, 1073809578] can be + * represented. Similarly, any match offset in [1, 1180427428] can be + * represented. For delta matches, this range applies to the raw offset, so the + * real offset may be larger. * * For LZ matches, up to 3 repeat offsets are allowed, similar to some other * LZ-based formats such as LZX and LZMA. They must updated in an LRU fashion, @@ -78,10 +126,9 @@ * references to the first 3 entries at any given time. The queue must be * initialized to the offsets {1, 2, 3, 4}. * - * Repeat delta matches are handled similarly, but for them there are two queues - * updated in lock-step: one for powers and one for raw offsets. The power - * queue must be initialized to {0, 0, 0, 0}, and the raw offset queue must be - * initialized to {1, 2, 3, 4}. + * Repeat delta matches are handled similarly, but for them the queue contains + * (power, raw offset) pairs. This queue must be initialized to + * {(0, 1), (0, 2), (0, 3), (0, 4)}. * * Bits from the binary range decoder must be used to disambiguate item types. * The range decoder must hold two state variables: the range, which must @@ -106,14 +153,14 @@ * it. * * The probability used to range-decode each bit must be taken from a table, of - * which one instance must exist for each distinct context in which a - * range-decoded bit is needed. At each call of the range decoder, the - * appropriate probability must be obtained by indexing the appropriate - * probability table with the last 4 (in the context disambiguating literals - * from matches), 5 (in the context disambiguating LZ matches from delta - * matches), or 6 (in all other contexts) bits recently range-decoded in that - * context, ordered such that the most recently decoded bit is the low-order bit - * of the index. + * which one instance must exist for each distinct context, or "binary decision + * class", in which a range-decoded bit is needed. At each call of the range + * decoder, the appropriate probability must be obtained by indexing the + * appropriate probability table with the last 4 (in the context disambiguating + * literals from matches), 5 (in the context disambiguating LZ matches from + * delta matches), or 6 (in all other contexts) bits recently range-decoded in + * that context, ordered such that the most recently decoded bit is the + * low-order bit of the index. * * Furthermore, each probability entry itself is variable, as its value must be * maintained as n/64 where n is the number of 0 bits in the most recently @@ -150,12 +197,12 @@ * reconstitute the full length. This code must be rebuilt whenever 512 * symbols have been decoded with it. * - * - The delta offset code, used for decoding the offsets of delta matches. 
+ * - The delta offset code, used for decoding the raw offsets of delta matches. * Each symbol corresponds to an offset slot, which corresponds to a base * value and some number of extra bits which must be read and added to the - * base value to reconstitute the full offset. The number of symbols in this - * code is equal to the number of symbols in the LZ offset code. This code - * must be rebuilt whenever 1024 symbols have been decoded with it. + * base value to reconstitute the full raw offset. The number of symbols in + * this code is equal to the number of symbols in the LZ offset code. This + * code must be rebuilt whenever 1024 symbols have been decoded with it. * * - The delta power code, used for decoding the powers of delta matches. Each * of the 8 symbols corresponds to a power. This code must be rebuilt @@ -229,10 +276,10 @@ struct lzms_range_decoder { /* Pointer to the next little-endian 16-bit integer in the compressed * input data (reading forwards). */ - const le16 *next; + const u8 *next; /* Pointer to the end of the compressed input data. */ - const le16 *end; + const u8 *end; }; typedef u64 bitbuf_t; @@ -248,22 +295,23 @@ struct lzms_input_bitstream { /* Pointer to the one past the next little-endian 16-bit integer in the * compressed input data (reading backwards). */ - const le16 *next; + const u8 *next; /* Pointer to the beginning of the compressed input data. */ - const le16 *begin; + const u8 *begin; }; +#define BITBUF_NBITS (8 * sizeof(bitbuf_t)) + /* Bookkeeping information for an adaptive Huffman code */ struct lzms_huffman_rebuild_info { unsigned num_syms_until_rebuild; + unsigned num_syms; unsigned rebuild_freq; + u32 *codewords; + u32 *freqs; u16 *decode_table; unsigned table_bits; - u32 *freqs; - u32 *codewords; - u8 *lens; - unsigned num_syms; }; struct lzms_decompressor { @@ -273,44 +321,7 @@ struct lzms_decompressor { union { struct { - struct lzms_range_decoder rd; - struct lzms_input_bitstream is; - - /* Match offset LRU queues */ - u32 recent_lz_offsets[LZMS_NUM_RECENT_OFFSETS + 1]; - u64 recent_delta_offsets[LZMS_NUM_RECENT_OFFSETS + 1]; - u32 pending_lz_offset; - u64 pending_delta_offset; - const u8 *lz_offset_still_pending; - const u8 *delta_offset_still_pending; - - /* States and probabilities for range decoding */ - - u32 main_state; - struct lzms_probability_entry main_prob_entries[ - LZMS_NUM_MAIN_STATES]; - - u32 match_state; - struct lzms_probability_entry match_prob_entries[ - LZMS_NUM_MATCH_STATES]; - - u32 lz_match_state; - struct lzms_probability_entry lz_match_prob_entries[ - LZMS_NUM_LZ_MATCH_STATES]; - - u32 delta_match_state; - struct lzms_probability_entry delta_match_prob_entries[ - LZMS_NUM_DELTA_MATCH_STATES]; - - u32 lz_repeat_match_states[LZMS_NUM_RECENT_OFFSETS - 1]; - struct lzms_probability_entry lz_repeat_match_prob_entries[ - LZMS_NUM_RECENT_OFFSETS - 1][LZMS_NUM_LZ_REPEAT_MATCH_STATES]; - - u32 delta_repeat_match_states[LZMS_NUM_RECENT_OFFSETS - 1]; - struct lzms_probability_entry delta_repeat_match_prob_entries[ - LZMS_NUM_RECENT_OFFSETS - 1][LZMS_NUM_DELTA_REPEAT_MATCH_STATES]; - - /* Huffman decoding */ + struct lzms_probabilites probs; u16 literal_decode_table[(1 << LZMS_LITERAL_TABLEBITS) + (2 * LZMS_NUM_LITERAL_SYMS)] @@ -318,18 +329,18 @@ struct lzms_decompressor { u32 literal_freqs[LZMS_NUM_LITERAL_SYMS]; struct lzms_huffman_rebuild_info literal_rebuild_info; - u16 length_decode_table[(1 << LZMS_LENGTH_TABLEBITS) + - (2 * LZMS_NUM_LENGTH_SYMS)] - _aligned_attribute(DECODE_TABLE_ALIGNMENT); - u32 
length_freqs[LZMS_NUM_LENGTH_SYMS]; - struct lzms_huffman_rebuild_info length_rebuild_info; - u16 lz_offset_decode_table[(1 << LZMS_LZ_OFFSET_TABLEBITS) + ( 2 * LZMS_MAX_NUM_OFFSET_SYMS)] _aligned_attribute(DECODE_TABLE_ALIGNMENT); u32 lz_offset_freqs[LZMS_MAX_NUM_OFFSET_SYMS]; struct lzms_huffman_rebuild_info lz_offset_rebuild_info; + u16 length_decode_table[(1 << LZMS_LENGTH_TABLEBITS) + + (2 * LZMS_NUM_LENGTH_SYMS)] + _aligned_attribute(DECODE_TABLE_ALIGNMENT); + u32 length_freqs[LZMS_NUM_LENGTH_SYMS]; + struct lzms_huffman_rebuild_info length_rebuild_info; + u16 delta_offset_decode_table[(1 << LZMS_DELTA_OFFSET_TABLEBITS) + (2 * LZMS_MAX_NUM_OFFSET_SYMS)] _aligned_attribute(DECODE_TABLE_ALIGNMENT); @@ -343,7 +354,6 @@ struct lzms_decompressor { struct lzms_huffman_rebuild_info delta_power_rebuild_info; u32 codewords[LZMS_MAX_NUM_SYMS]; - u8 lens[LZMS_MAX_NUM_SYMS]; }; // struct @@ -353,10 +363,10 @@ struct lzms_decompressor { }; /* Initialize the input bitstream @is to read backwards from the compressed data - * buffer @in that is @count 16-bit integers long. */ + * buffer @in that is @count bytes long. */ static void lzms_input_bitstream_init(struct lzms_input_bitstream *is, - const le16 *in, size_t count) + const u8 *in, size_t count) { is->bitbuf = 0; is->bitsleft = 0; @@ -369,27 +379,39 @@ lzms_input_bitstream_init(struct lzms_input_bitstream *is, static inline void lzms_ensure_bits(struct lzms_input_bitstream *is, unsigned num_bits) { + unsigned avail; + if (is->bitsleft >= num_bits) return; - if (likely(is->next != is->begin)) - is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next) - << (sizeof(is->bitbuf) * 8 - is->bitsleft - 16); - is->bitsleft += 16; + avail = BITBUF_NBITS - is->bitsleft; - if (likely(is->next != is->begin)) - is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next) - << (sizeof(is->bitbuf) * 8 - is->bitsleft - 16); - is->bitsleft += 16; + if (UNALIGNED_ACCESS_IS_FAST && CPU_IS_LITTLE_ENDIAN && + WORDSIZE == 8 && likely(is->next - is->begin >= 8)) + { + is->next -= (avail & ~15) >> 3; + is->bitbuf |= load_u64_unaligned(is->next) << (avail & 15); + is->bitsleft += avail & ~15; + } else { + if (likely(is->next != is->begin)) { + is->next -= sizeof(le16); + is->bitbuf |= (bitbuf_t)get_unaligned_le16(is->next) + << (avail - 16); + } + if (likely(is->next != is->begin)) { + is->next -= sizeof(le16); + is->bitbuf |= (bitbuf_t)get_unaligned_le16(is->next) + << (avail - 32); + } + is->bitsleft += 32; + } } /* Get @num_bits bits from the bitbuffer variable. */ static inline bitbuf_t lzms_peek_bits(struct lzms_input_bitstream *is, unsigned num_bits) { - if (unlikely(num_bits == 0)) - return 0; - return is->bitbuf >> (sizeof(is->bitbuf) * 8 - num_bits); + return (is->bitbuf >> 1) >> (BITBUF_NBITS - num_bits - 1); } /* Remove @num_bits bits from the bitbuffer variable. */ @@ -418,32 +440,50 @@ lzms_read_bits(struct lzms_input_bitstream *is, unsigned num_bits) } /* Initialize the range decoder @rd to read forwards from the compressed data - * buffer @in that is @count 16-bit integers long. */ + * buffer @in that is @count bytes long. */ static void lzms_range_decoder_init(struct lzms_range_decoder *rd, - const le16 *in, size_t count) + const u8 *in, size_t count) { rd->range = 0xffffffff; - rd->code = ((u32)le16_to_cpu(in[0]) << 16) | le16_to_cpu(in[1]); - rd->next = in + 2; + rd->code = ((u32)get_unaligned_le16(in) << 16) | + get_unaligned_le16(in + 2); + rd->next = in + 4; rd->end = in + count; } -/* Decode and return the next bit from the range decoder. 
- * - * @prob is the chance out of LZMS_PROBABILITY_MAX that the next bit is 0. +/* + * Decode a bit using the range coder. The current state specifies the + * probability entry to use. The state and probability entry will be updated + * based on the decoded bit. */ static inline int -lzms_range_decoder_decode_bit(struct lzms_range_decoder *rd, u32 prob) +lzms_decode_bit(struct lzms_range_decoder *rd, u32 *state_p, u32 num_states, + struct lzms_probability_entry *probs) { + struct lzms_probability_entry *prob_entry; + u32 prob; u32 bound; + /* Load the probability entry corresponding to the current state. */ + prob_entry = &probs[*state_p]; + + /* Update the state early. We'll still need to OR the state with 1 + * later if the decoded bit is a 1. */ + *state_p = (*state_p << 1) & (num_states - 1); + + /* Get the probability (out of LZMS_PROBABILITY_DENOMINATOR) that the + * next bit is 0. */ + prob = lzms_get_probability(prob_entry); + /* Normalize if needed. */ - if (rd->range <= 0xffff) { + if (!(rd->range & 0xFFFF0000)) { rd->range <<= 16; rd->code <<= 16; - if (likely(rd->next != rd->end)) - rd->code |= le16_to_cpu(*rd->next++); + if (likely(rd->next != rd->end)) { + rd->code |= get_unaligned_le16(rd->next); + rd->next += sizeof(le16); + } } /* Based on the probability, calculate the bound between the 0-bit @@ -453,136 +493,117 @@ lzms_range_decoder_decode_bit(struct lzms_range_decoder *rd, u32 prob) if (rd->code < bound) { /* Current code is in the 0-bit region of the range. */ rd->range = bound; + + /* Update the state and probability entry based on the decoded bit. */ + lzms_update_probability_entry(prob_entry, 0); return 0; } else { /* Current code is in the 1-bit region of the range. */ rd->range -= bound; rd->code -= bound; + + /* Update the state and probability entry based on the decoded bit. */ + lzms_update_probability_entry(prob_entry, 1); + *state_p |= 1; return 1; } } -/* Decode and return the next bit from the range decoder. This wraps around - * lzms_range_decoder_decode_bit() to handle using and updating the appropriate - * state and probability entry. */ -static inline int -lzms_range_decode_bit(struct lzms_range_decoder *rd, - u32 *state_p, u32 num_states, - struct lzms_probability_entry prob_entries[]) -{ - struct lzms_probability_entry *prob_entry; - u32 prob; - int bit; - - /* Load the probability entry corresponding to the current state. */ - prob_entry = &prob_entries[*state_p]; - - /* Get the probability that the next bit is 0. */ - prob = lzms_get_probability(prob_entry); - - /* Decode the next bit. */ - bit = lzms_range_decoder_decode_bit(rd, prob); - - /* Update the state and probability entry based on the decoded bit. */ - *state_p = ((*state_p << 1) | bit) & (num_states - 1); - lzms_update_probability_entry(prob_entry, bit); - - /* Return the decoded bit. 
*/ - return bit; -} - -static int -lzms_decode_main_bit(struct lzms_decompressor *d) -{ - return lzms_range_decode_bit(&d->rd, &d->main_state, - LZMS_NUM_MAIN_STATES, - d->main_prob_entries); -} - -static int -lzms_decode_match_bit(struct lzms_decompressor *d) -{ - return lzms_range_decode_bit(&d->rd, &d->match_state, - LZMS_NUM_MATCH_STATES, - d->match_prob_entries); -} - -static int -lzms_decode_lz_match_bit(struct lzms_decompressor *d) -{ - return lzms_range_decode_bit(&d->rd, &d->lz_match_state, - LZMS_NUM_LZ_MATCH_STATES, - d->lz_match_prob_entries); -} - -static int -lzms_decode_delta_match_bit(struct lzms_decompressor *d) -{ - return lzms_range_decode_bit(&d->rd, &d->delta_match_state, - LZMS_NUM_DELTA_MATCH_STATES, - d->delta_match_prob_entries); -} - -static noinline int -lzms_decode_lz_repeat_match_bit(struct lzms_decompressor *d, int idx) +static void +lzms_build_huffman_code(struct lzms_huffman_rebuild_info *rebuild_info) { - return lzms_range_decode_bit(&d->rd, &d->lz_repeat_match_states[idx], - LZMS_NUM_LZ_REPEAT_MATCH_STATES, - d->lz_repeat_match_prob_entries[idx]); + make_canonical_huffman_code(rebuild_info->num_syms, + LZMS_MAX_CODEWORD_LENGTH, + rebuild_info->freqs, + (u8 *)rebuild_info->decode_table, + rebuild_info->codewords); + + make_huffman_decode_table(rebuild_info->decode_table, + rebuild_info->num_syms, + rebuild_info->table_bits, + (u8 *)rebuild_info->decode_table, + LZMS_MAX_CODEWORD_LENGTH); + + rebuild_info->num_syms_until_rebuild = rebuild_info->rebuild_freq; } -static noinline int -lzms_decode_delta_repeat_match_bit(struct lzms_decompressor *d, int idx) +static void +lzms_init_huffman_code(struct lzms_huffman_rebuild_info *rebuild_info, + unsigned num_syms, unsigned rebuild_freq, + u32 *codewords, u32 *freqs, + u16 *decode_table, unsigned table_bits) { - return lzms_range_decode_bit(&d->rd, &d->delta_repeat_match_states[idx], - LZMS_NUM_DELTA_REPEAT_MATCH_STATES, - d->delta_repeat_match_prob_entries[idx]); + rebuild_info->num_syms = num_syms; + rebuild_info->rebuild_freq = rebuild_freq; + rebuild_info->codewords = codewords; + rebuild_info->freqs = freqs; + rebuild_info->decode_table = decode_table; + rebuild_info->table_bits = table_bits; + lzms_init_symbol_frequencies(freqs, num_syms); + lzms_build_huffman_code(rebuild_info); } static void -lzms_init_huffman_rebuild_info(struct lzms_huffman_rebuild_info *info, - unsigned rebuild_freq, - u16 *decode_table, unsigned table_bits, - u32 *freqs, u32 *codewords, u8 *lens, - unsigned num_syms) +lzms_init_huffman_codes(struct lzms_decompressor *d, unsigned num_offset_slots) { - info->num_syms_until_rebuild = 1; - info->rebuild_freq = rebuild_freq; - info->decode_table = decode_table; - info->table_bits = table_bits; - info->freqs = freqs; - info->codewords = codewords; - info->lens = lens; - info->num_syms = num_syms; - lzms_init_symbol_frequencies(freqs, num_syms); + lzms_init_huffman_code(&d->literal_rebuild_info, + LZMS_NUM_LITERAL_SYMS, + LZMS_LITERAL_CODE_REBUILD_FREQ, + d->codewords, + d->literal_freqs, + d->literal_decode_table, + LZMS_LITERAL_TABLEBITS); + + lzms_init_huffman_code(&d->lz_offset_rebuild_info, + num_offset_slots, + LZMS_LZ_OFFSET_CODE_REBUILD_FREQ, + d->codewords, + d->lz_offset_freqs, + d->lz_offset_decode_table, + LZMS_LZ_OFFSET_TABLEBITS); + + lzms_init_huffman_code(&d->length_rebuild_info, + LZMS_NUM_LENGTH_SYMS, + LZMS_LENGTH_CODE_REBUILD_FREQ, + d->codewords, + d->length_freqs, + d->length_decode_table, + LZMS_LENGTH_TABLEBITS); + + lzms_init_huffman_code(&d->delta_offset_rebuild_info, 
+ num_offset_slots, + LZMS_DELTA_OFFSET_CODE_REBUILD_FREQ, + d->codewords, + d->delta_offset_freqs, + d->delta_offset_decode_table, + LZMS_DELTA_OFFSET_TABLEBITS); + + lzms_init_huffman_code(&d->delta_power_rebuild_info, + LZMS_NUM_DELTA_POWER_SYMS, + LZMS_DELTA_POWER_CODE_REBUILD_FREQ, + d->codewords, + d->delta_power_freqs, + d->delta_power_decode_table, + LZMS_DELTA_POWER_TABLEBITS); } static noinline void -lzms_rebuild_huffman_code(struct lzms_huffman_rebuild_info *info) +lzms_rebuild_huffman_code(struct lzms_huffman_rebuild_info *rebuild_info) { - make_canonical_huffman_code(info->num_syms, LZMS_MAX_CODEWORD_LEN, - info->freqs, info->lens, info->codewords); - make_huffman_decode_table(info->decode_table, info->num_syms, - info->table_bits, info->lens, - LZMS_MAX_CODEWORD_LEN); - for (unsigned i = 0; i < info->num_syms; i++) - info->freqs[i] = (info->freqs[i] >> 1) + 1; - info->num_syms_until_rebuild = info->rebuild_freq; + lzms_build_huffman_code(rebuild_info); + lzms_dilute_symbol_frequencies(rebuild_info->freqs, rebuild_info->num_syms); } static inline unsigned -lzms_decode_huffman_symbol(struct lzms_input_bitstream *is, - u16 decode_table[], unsigned table_bits, +lzms_decode_huffman_symbol(struct lzms_input_bitstream *is, u16 decode_table[], + unsigned table_bits, u32 freqs[], struct lzms_huffman_rebuild_info *rebuild_info) { unsigned key_bits; unsigned entry; unsigned sym; - if (unlikely(--rebuild_info->num_syms_until_rebuild == 0)) - lzms_rebuild_huffman_code(rebuild_info); - - lzms_ensure_bits(is, LZMS_MAX_CODEWORD_LEN); + lzms_ensure_bits(is, LZMS_MAX_CODEWORD_LENGTH); /* Index the decode table by the next table_bits bits of the input. */ key_bits = lzms_peek_bits(is, table_bits); @@ -606,362 +627,304 @@ lzms_decode_huffman_symbol(struct lzms_input_bitstream *is, sym = entry; } - /* Tally and return the decoded symbol. */ - rebuild_info->freqs[sym]++; + freqs[sym]++; + if (--rebuild_info->num_syms_until_rebuild == 0) + lzms_rebuild_huffman_code(rebuild_info); return sym; } -static unsigned -lzms_decode_literal(struct lzms_decompressor *d) +static inline unsigned +lzms_decode_literal(struct lzms_decompressor *d, + struct lzms_input_bitstream *is) { - return lzms_decode_huffman_symbol(&d->is, + return lzms_decode_huffman_symbol(is, d->literal_decode_table, LZMS_LITERAL_TABLEBITS, + d->literal_freqs, &d->literal_rebuild_info); } -static u32 -lzms_decode_length(struct lzms_decompressor *d) +static inline u32 +lzms_decode_lz_offset(struct lzms_decompressor *d, + struct lzms_input_bitstream *is) { - unsigned slot = lzms_decode_huffman_symbol(&d->is, + unsigned slot = lzms_decode_huffman_symbol(is, + d->lz_offset_decode_table, + LZMS_LZ_OFFSET_TABLEBITS, + d->lz_offset_freqs, + &d->lz_offset_rebuild_info); + return lzms_offset_slot_base[slot] + + lzms_read_bits(is, lzms_extra_offset_bits[slot]); +} + +static inline u32 +lzms_decode_length(struct lzms_decompressor *d, + struct lzms_input_bitstream *is) +{ + unsigned slot = lzms_decode_huffman_symbol(is, d->length_decode_table, LZMS_LENGTH_TABLEBITS, + d->length_freqs, &d->length_rebuild_info); u32 length = lzms_length_slot_base[slot]; unsigned num_extra_bits = lzms_extra_length_bits[slot]; /* Usually most lengths are short and have no extra bits. 
*/ if (num_extra_bits) - length += lzms_read_bits(&d->is, num_extra_bits); + length += lzms_read_bits(is, num_extra_bits); return length; } -static u32 -lzms_decode_lz_offset(struct lzms_decompressor *d) +static inline u32 +lzms_decode_delta_offset(struct lzms_decompressor *d, + struct lzms_input_bitstream *is) { - unsigned slot = lzms_decode_huffman_symbol(&d->is, - d->lz_offset_decode_table, - LZMS_LZ_OFFSET_TABLEBITS, - &d->lz_offset_rebuild_info); - return lzms_offset_slot_base[slot] + - lzms_read_bits(&d->is, lzms_extra_offset_bits[slot]); -} - -static u32 -lzms_decode_delta_offset(struct lzms_decompressor *d) -{ - unsigned slot = lzms_decode_huffman_symbol(&d->is, + unsigned slot = lzms_decode_huffman_symbol(is, d->delta_offset_decode_table, LZMS_DELTA_OFFSET_TABLEBITS, + d->delta_offset_freqs, &d->delta_offset_rebuild_info); return lzms_offset_slot_base[slot] + - lzms_read_bits(&d->is, lzms_extra_offset_bits[slot]); + lzms_read_bits(is, lzms_extra_offset_bits[slot]); } -static unsigned -lzms_decode_delta_power(struct lzms_decompressor *d) +static inline unsigned +lzms_decode_delta_power(struct lzms_decompressor *d, + struct lzms_input_bitstream *is) { - return lzms_decode_huffman_symbol(&d->is, + return lzms_decode_huffman_symbol(is, d->delta_power_decode_table, LZMS_DELTA_POWER_TABLEBITS, + d->delta_power_freqs, &d->delta_power_rebuild_info); } -/* Decode the series of literals and matches from the LZMS-compressed data. - * Return 0 if successful or -1 if the compressed data is invalid. */ static int -lzms_decode_items(struct lzms_decompressor * const restrict d, - u8 * const restrict out, const size_t out_nbytes) +lzms_create_decompressor(size_t max_bufsize, void **d_ret) +{ + struct lzms_decompressor *d; + + if (max_bufsize > LZMS_MAX_BUFFER_SIZE) + return WIMLIB_ERR_INVALID_PARAM; + + d = ALIGNED_MALLOC(sizeof(struct lzms_decompressor), + DECODE_TABLE_ALIGNMENT); + if (!d) + return WIMLIB_ERR_NOMEM; + + *d_ret = d; + return 0; +} + +/* + * Decompress @in_nbytes bytes of LZMS-compressed data at @in and write the + * uncompressed data, which had original size @out_nbytes, to @out. Return 0 if + * successful or -1 if the compressed data is invalid. + */ +static int +lzms_decompress(const void * const restrict in, const size_t in_nbytes, + void * const restrict out, const size_t out_nbytes, + void * const restrict _d) { + struct lzms_decompressor *d = _d; u8 *out_next = out; u8 * const out_end = out + out_nbytes; + struct lzms_range_decoder rd; + struct lzms_input_bitstream is; - while (out_next != out_end) { + /* LRU queues for match sources */ + u32 recent_lz_offsets[LZMS_NUM_LZ_REPS + 1]; + u64 recent_delta_pairs[LZMS_NUM_DELTA_REPS + 1]; + + /* Previous item type: 0 = literal, 1 = LZ match, 2 = delta match. + * This is used to handle delayed updates of the LRU queues. Instead of + * actually delaying the updates, we can check when decoding each rep + * match whether a delayed update needs to be taken into account, and if + * so get the match source from slot 'rep_idx + 1' instead of from slot + * 'rep_idx'. */ + unsigned prev_item_type = 0; + + /* States and probability entries for item type disambiguation */ + u32 main_state = 0; + u32 match_state = 0; + u32 lz_state = 0; + u32 delta_state = 0; + u32 lz_rep_states[LZMS_NUM_LZ_REP_DECISIONS] = {}; + u32 delta_rep_states[LZMS_NUM_DELTA_REP_DECISIONS] = {}; + + /* + * Requirements on the compressed data: + * + * 1. 
LZMS-compressed data is a series of 16-bit integers, so the + * compressed data buffer cannot take up an odd number of bytes. + * 2. There must be at least 4 bytes of compressed data, since otherwise + * we cannot even initialize the range decoder. + */ + if ((in_nbytes & 1) || (in_nbytes < 4)) + return -1; - if (!lzms_decode_main_bit(d)) { + lzms_range_decoder_init(&rd, in, in_nbytes); - /* Literal */ - *out_next++ = lzms_decode_literal(d); + lzms_input_bitstream_init(&is, in, in_nbytes); + + lzms_init_probabilities(&d->probs); - } else if (!lzms_decode_match_bit(d)) { + lzms_init_huffman_codes(d, lzms_get_num_offset_slots(out_nbytes)); + for (int i = 0; i < LZMS_NUM_LZ_REPS + 1; i++) + recent_lz_offsets[i] = i + 1; + + for (int i = 0; i < LZMS_NUM_DELTA_REPS + 1; i++) + recent_delta_pairs[i] = i + 1; + + /* Main decode loop */ + while (out_next != out_end) { + + if (!lzms_decode_bit(&rd, &main_state, + LZMS_NUM_MAIN_PROBS, d->probs.main)) + { + /* Literal */ + *out_next++ = lzms_decode_literal(d, &is); + prev_item_type = 0; + + } else if (!lzms_decode_bit(&rd, &match_state, + LZMS_NUM_MATCH_PROBS, + d->probs.match)) + { /* LZ match */ u32 offset; u32 length; - if (d->pending_lz_offset != 0 && - out_next != d->lz_offset_still_pending) - { - BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3); - d->recent_lz_offsets[3] = d->recent_lz_offsets[2]; - d->recent_lz_offsets[2] = d->recent_lz_offsets[1]; - d->recent_lz_offsets[1] = d->recent_lz_offsets[0]; - d->recent_lz_offsets[0] = d->pending_lz_offset; - d->pending_lz_offset = 0; - } + STATIC_ASSERT(LZMS_NUM_LZ_REPS == 3); - if (!lzms_decode_lz_match_bit(d)) { + if (!lzms_decode_bit(&rd, &lz_state, + LZMS_NUM_LZ_PROBS, d->probs.lz)) + { /* Explicit offset */ - offset = lzms_decode_lz_offset(d); + offset = lzms_decode_lz_offset(d, &is); + + recent_lz_offsets[3] = recent_lz_offsets[2]; + recent_lz_offsets[2] = recent_lz_offsets[1]; + recent_lz_offsets[1] = recent_lz_offsets[0]; } else { /* Repeat offset */ - BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3); - if (!lzms_decode_lz_repeat_match_bit(d, 0)) { - offset = d->recent_lz_offsets[0]; - d->recent_lz_offsets[0] = d->recent_lz_offsets[1]; - d->recent_lz_offsets[1] = d->recent_lz_offsets[2]; - d->recent_lz_offsets[2] = d->recent_lz_offsets[3]; - } else if (!lzms_decode_lz_repeat_match_bit(d, 1)) { - offset = d->recent_lz_offsets[1]; - d->recent_lz_offsets[1] = d->recent_lz_offsets[2]; - d->recent_lz_offsets[2] = d->recent_lz_offsets[3]; + if (!lzms_decode_bit(&rd, &lz_rep_states[0], + LZMS_NUM_LZ_REP_PROBS, + d->probs.lz_rep[0])) + { + offset = recent_lz_offsets[0 + (prev_item_type & 1)]; + recent_lz_offsets[0 + (prev_item_type & 1)] = recent_lz_offsets[0]; + } else if (!lzms_decode_bit(&rd, &lz_rep_states[1], + LZMS_NUM_LZ_REP_PROBS, + d->probs.lz_rep[1])) + { + offset = recent_lz_offsets[1 + (prev_item_type & 1)]; + recent_lz_offsets[1 + (prev_item_type & 1)] = recent_lz_offsets[1]; + recent_lz_offsets[1] = recent_lz_offsets[0]; } else { - offset = d->recent_lz_offsets[2]; - d->recent_lz_offsets[2] = d->recent_lz_offsets[3]; + offset = recent_lz_offsets[2 + (prev_item_type & 1)]; + recent_lz_offsets[2 + (prev_item_type & 1)] = recent_lz_offsets[2]; + recent_lz_offsets[2] = recent_lz_offsets[1]; + recent_lz_offsets[1] = recent_lz_offsets[0]; } } + recent_lz_offsets[0] = offset; + prev_item_type = 1; - if (d->pending_lz_offset != 0) { - BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3); - d->recent_lz_offsets[3] = d->recent_lz_offsets[2]; - d->recent_lz_offsets[2] = d->recent_lz_offsets[1]; - 
d->recent_lz_offsets[1] = d->recent_lz_offsets[0]; - d->recent_lz_offsets[0] = d->pending_lz_offset; - } - d->pending_lz_offset = offset; - - length = lzms_decode_length(d); + length = lzms_decode_length(d, &is); if (unlikely(length > out_end - out_next)) return -1; - if (unlikely(offset > out_next - out)) + if (unlikely(offset > out_next - (u8 *)out)) return -1; - lz_copy(out_next, length, offset, out_end, LZMS_MIN_MATCH_LEN); + lz_copy(out_next, length, offset, out_end, LZMS_MIN_MATCH_LENGTH); out_next += length; - - d->lz_offset_still_pending = out_next; } else { /* Delta match */ + /* (See beginning of file for more information.) */ + u32 power; - u32 raw_offset, offset1, offset2, offset; - const u8 *matchptr1, *matchptr2, *matchptr; + u32 raw_offset; + u32 span; + u32 offset; + const u8 *matchptr; u32 length; + u64 pair; - if (d->pending_delta_offset != 0 && - out_next != d->delta_offset_still_pending) - { - BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3); - d->recent_delta_offsets[3] = d->recent_delta_offsets[2]; - d->recent_delta_offsets[2] = d->recent_delta_offsets[1]; - d->recent_delta_offsets[1] = d->recent_delta_offsets[0]; - d->recent_delta_offsets[0] = d->pending_delta_offset; - d->pending_delta_offset = 0; - } + STATIC_ASSERT(LZMS_NUM_DELTA_REPS == 3); - if (!lzms_decode_delta_match_bit(d)) { + if (!lzms_decode_bit(&rd, &delta_state, + LZMS_NUM_DELTA_PROBS, + d->probs.delta)) + { /* Explicit offset */ - power = lzms_decode_delta_power(d); - raw_offset = lzms_decode_delta_offset(d); + power = lzms_decode_delta_power(d, &is); + raw_offset = lzms_decode_delta_offset(d, &is); + + pair = ((u64)power << 32) | raw_offset; + recent_delta_pairs[3] = recent_delta_pairs[2]; + recent_delta_pairs[2] = recent_delta_pairs[1]; + recent_delta_pairs[1] = recent_delta_pairs[0]; } else { - /* Repeat offset */ - u64 val; - - BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3); - if (!lzms_decode_delta_repeat_match_bit(d, 0)) { - val = d->recent_delta_offsets[0]; - d->recent_delta_offsets[0] = d->recent_delta_offsets[1]; - d->recent_delta_offsets[1] = d->recent_delta_offsets[2]; - d->recent_delta_offsets[2] = d->recent_delta_offsets[3]; - } else if (!lzms_decode_delta_repeat_match_bit(d, 1)) { - val = d->recent_delta_offsets[1]; - d->recent_delta_offsets[1] = d->recent_delta_offsets[2]; - d->recent_delta_offsets[2] = d->recent_delta_offsets[3]; + if (!lzms_decode_bit(&rd, &delta_rep_states[0], + LZMS_NUM_DELTA_REP_PROBS, + d->probs.delta_rep[0])) + { + pair = recent_delta_pairs[0 + (prev_item_type >> 1)]; + recent_delta_pairs[0 + (prev_item_type >> 1)] = recent_delta_pairs[0]; + } else if (!lzms_decode_bit(&rd, &delta_rep_states[1], + LZMS_NUM_DELTA_REP_PROBS, + d->probs.delta_rep[1])) + { + pair = recent_delta_pairs[1 + (prev_item_type >> 1)]; + recent_delta_pairs[1 + (prev_item_type >> 1)] = recent_delta_pairs[1]; + recent_delta_pairs[1] = recent_delta_pairs[0]; } else { - val = d->recent_delta_offsets[2]; - d->recent_delta_offsets[2] = d->recent_delta_offsets[3]; + pair = recent_delta_pairs[2 + (prev_item_type >> 1)]; + recent_delta_pairs[2 + (prev_item_type >> 1)] = recent_delta_pairs[2]; + recent_delta_pairs[2] = recent_delta_pairs[1]; + recent_delta_pairs[1] = recent_delta_pairs[0]; } - power = val >> 32; - raw_offset = (u32)val; - } - if (d->pending_delta_offset != 0) { - BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3); - d->recent_delta_offsets[3] = d->recent_delta_offsets[2]; - d->recent_delta_offsets[2] = d->recent_delta_offsets[1]; - d->recent_delta_offsets[1] = d->recent_delta_offsets[0]; - 
d->recent_delta_offsets[0] = d->pending_delta_offset;
+				power = pair >> 32;
+				raw_offset = (u32)pair;
 			}
-			d->pending_delta_offset = raw_offset | ((u64)power << 32);
+			recent_delta_pairs[0] = pair;
+			prev_item_type = 2;
 
-			length = lzms_decode_length(d);
+			length = lzms_decode_length(d, &is);
 
-			offset1 = (u32)1 << power;
-			offset2 = raw_offset << power;
-			offset = offset1 + offset2;
+			span = (u32)1 << power;
+			offset = raw_offset << power;
 
-			/* raw_offset<<power overflows? */
-			if (unlikely((offset2 >> power) != raw_offset))
+			/* raw_offset<<power overflows? */
+			if (unlikely(offset >> power != raw_offset))
 				return -1;
 
-			/* offset1+offset2 overflowed? */
-			if (unlikely(offset < offset2))
+			/* offset+span overflows? */
+			if (unlikely(offset + span < offset))
 				return -1;
 
-			if (unlikely(length > out_end - out_next))
+			/* buffer underrun? */
+			if (unlikely(offset + span > out_next - (u8 *)out))
 				return -1;
 
-			if (unlikely(offset > out_next - out))
+			/* buffer overrun? */
+			if (unlikely(length > out_end - out_next))
 				return -1;
 
-			matchptr1 = out_next - offset1;
-			matchptr2 = out_next - offset2;
 			matchptr = out_next - offset;
-
 			do {
-				*out_next++ = *matchptr1++ + *matchptr2++ - *matchptr++;
+				*out_next = *matchptr + *(out_next - span) -
+					    *(matchptr - span);
+				out_next++;
+				matchptr++;
 			} while (--length);
-
-			d->delta_offset_still_pending = out_next;
 		}
 	}
 
-	return 0;
-}
-
-static void
-lzms_init_decompressor(struct lzms_decompressor *d, const void *in,
-		       size_t in_nbytes, unsigned num_offset_slots)
-{
-	/* Match offset LRU queues */
-	for (int i = 0; i < LZMS_NUM_RECENT_OFFSETS + 1; i++) {
-		d->recent_lz_offsets[i] = i + 1;
-		d->recent_delta_offsets[i] = i + 1;
-	}
-	d->pending_lz_offset = 0;
-	d->pending_delta_offset = 0;
-
-	/* Range decoding */
-
-	lzms_range_decoder_init(&d->rd, in, in_nbytes / sizeof(le16));
-
-	d->main_state = 0;
-	lzms_init_probability_entries(d->main_prob_entries, LZMS_NUM_MAIN_STATES);
-
-	d->match_state = 0;
-	lzms_init_probability_entries(d->match_prob_entries, LZMS_NUM_MATCH_STATES);
-
-	d->lz_match_state = 0;
-	lzms_init_probability_entries(d->lz_match_prob_entries, LZMS_NUM_LZ_MATCH_STATES);
-
-	d->delta_match_state = 0;
-	lzms_init_probability_entries(d->delta_match_prob_entries, LZMS_NUM_DELTA_MATCH_STATES);
-
-	for (int i = 0; i < LZMS_NUM_RECENT_OFFSETS - 1; i++) {
-		d->lz_repeat_match_states[i] = 0;
-		lzms_init_probability_entries(d->lz_repeat_match_prob_entries[i],
-					      LZMS_NUM_LZ_REPEAT_MATCH_STATES);
-
-		d->delta_repeat_match_states[i] = 0;
-		lzms_init_probability_entries(d->delta_repeat_match_prob_entries[i],
-					      LZMS_NUM_DELTA_REPEAT_MATCH_STATES);
-	}
-
-	/* Huffman decoding */
-
-	lzms_input_bitstream_init(&d->is, in, in_nbytes / sizeof(le16));
-
-	lzms_init_huffman_rebuild_info(&d->literal_rebuild_info,
-				       LZMS_LITERAL_CODE_REBUILD_FREQ,
-				       d->literal_decode_table,
-				       LZMS_LITERAL_TABLEBITS,
-				       d->literal_freqs,
-				       d->codewords,
-				       d->lens,
-				       LZMS_NUM_LITERAL_SYMS);
-
-	lzms_init_huffman_rebuild_info(&d->length_rebuild_info,
-				       LZMS_LENGTH_CODE_REBUILD_FREQ,
-				       d->length_decode_table,
-				       LZMS_LENGTH_TABLEBITS,
-				       d->length_freqs,
-				       d->codewords,
-				       d->lens,
-				       LZMS_NUM_LENGTH_SYMS);
-
-	lzms_init_huffman_rebuild_info(&d->lz_offset_rebuild_info,
-				       LZMS_LZ_OFFSET_CODE_REBUILD_FREQ,
-				       d->lz_offset_decode_table,
-				       LZMS_LZ_OFFSET_TABLEBITS,
-				       d->lz_offset_freqs,
-				       d->codewords,
-				       d->lens,
-				       num_offset_slots);
-
-	lzms_init_huffman_rebuild_info(&d->delta_offset_rebuild_info,
-				       LZMS_DELTA_OFFSET_CODE_REBUILD_FREQ,
-				       d->delta_offset_decode_table,
-				       LZMS_DELTA_OFFSET_TABLEBITS,
-				       d->delta_offset_freqs,
-				       d->codewords,
-				       d->lens,
-				       num_offset_slots);
-
-	
lzms_init_huffman_rebuild_info(&d->delta_power_rebuild_info, - LZMS_DELTA_POWER_CODE_REBUILD_FREQ, - d->delta_power_decode_table, - LZMS_DELTA_POWER_TABLEBITS, - d->delta_power_freqs, - d->codewords, - d->lens, - LZMS_NUM_DELTA_POWER_SYMS); -} - -static int -lzms_create_decompressor(size_t max_bufsize, void **d_ret) -{ - struct lzms_decompressor *d; - - if (max_bufsize > LZMS_MAX_BUFFER_SIZE) - return WIMLIB_ERR_INVALID_PARAM; - - d = ALIGNED_MALLOC(sizeof(struct lzms_decompressor), - DECODE_TABLE_ALIGNMENT); - if (!d) - return WIMLIB_ERR_NOMEM; - - *d_ret = d; - return 0; -} - -/* Decompress @in_nbytes bytes of LZMS-compressed data at @in and write the - * uncompressed data, which had original size @out_nbytes, to @out. Return 0 if - * successful or -1 if the compressed data is invalid. */ -static int -lzms_decompress(const void *in, size_t in_nbytes, void *out, size_t out_nbytes, - void *_d) -{ - struct lzms_decompressor *d = _d; - - /* - * Requirements on the compressed data: - * - * 1. LZMS-compressed data is a series of 16-bit integers, so the - * compressed data buffer cannot take up an odd number of bytes. - * 2. To prevent poor performance on some architectures, we require that - * the compressed data buffer is 2-byte aligned. - * 3. There must be at least 4 bytes of compressed data, since otherwise - * we cannot even initialize the range decoder. - */ - if ((in_nbytes & 1) || ((uintptr_t)in & 1) || (in_nbytes < 4)) - return -1; - - lzms_init_decompressor(d, in, in_nbytes, - lzms_get_num_offset_slots(out_nbytes)); - - if (lzms_decode_items(d, out, out_nbytes)) - return -1; lzms_x86_filter(out, out_nbytes, d->last_target_usages, true); return 0;
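
The delta-match description in the header comment above can be condensed into a short standalone sketch. The following is an illustrative reconstruction of a single delta match, assuming the bounds checks performed by the decoder have already passed; the function and variable names (delta_match_copy, buf, pos) are hypothetical and are not part of wimlib.

#include <stddef.h>
#include <stdint.h>

/*
 * Reconstruct one delta match at position 'pos' of the output buffer 'buf'.
 * Preconditions (validated by the real decoder before this point):
 * length >= 1, pos >= offset + span, and pos + length <= buffer size,
 * where span = 2**power and offset = raw_offset * 2**power.
 */
static void
delta_match_copy(uint8_t *buf, size_t pos, unsigned power,
		 uint32_t raw_offset, uint32_t length)
{
	uint32_t span = (uint32_t)1 << power;
	uint32_t offset = raw_offset << power;
	uint8_t *A = &buf[pos];		/* bytes being produced		*/
	const uint8_t *B = A - span;	/* 'span' bytes back		*/
	const uint8_t *C = A - offset;	/* 'offset' bytes back		*/
	const uint8_t *D = C - span;	/* 'offset + span' bytes back	*/

	/* The match asserts A[i] - B[i] == C[i] - D[i] (mod 256) for each i,
	 * so A can be rebuilt from the other three sequences.  The sequences
	 * may overlap, so exactly one byte must be produced per step. */
	do {
		*A++ = *B++ + *C++ - *D++;
	} while (--length);
}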
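
The adaptive bit probabilities used by the range decoder (n/64, where n counts the 0 bits among the last 64 bits decoded in a given context) can likewise be sketched on their own. This is an illustrative stand-alone version, not wimlib's actual struct lzms_probability_entry; the exclusion of exactly 0/64 and 64/64 matches how wimlib's LZMS code handles it, and the initial window contents defined by the format are omitted here.

#include <stdint.h>

#define PROB_DENOMINATOR 64	/* probabilities are expressed as n/64 */

struct prob_entry {
	unsigned num_zero_bits;	/* number of 0 bits in 'recent_bits' */
	uint64_t recent_bits;	/* last 64 bits decoded in this context;
				 * bit 0 is the most recently decoded bit */
};

/* Probability, out of 64, that the next bit decoded in this context is 0.
 * Probabilities of exactly 0 and 64 are not allowed, so 1 and 63 are
 * substituted. */
static inline uint32_t
prob_get(const struct prob_entry *e)
{
	uint32_t prob = e->num_zero_bits;

	if (prob == 0)
		prob = 1;
	else if (prob == PROB_DENOMINATOR)
		prob = PROB_DENOMINATOR - 1;
	return prob;
}

/* Slide the 64-bit window after a bit has been decoded in this context. */
static inline void
prob_update(struct prob_entry *e, int bit)
{
	/* The oldest bit (bit 63) leaves the window... */
	e->num_zero_bits -= !(e->recent_bits >> (PROB_DENOMINATOR - 1));
	/* ...and the new bit enters it. */
	e->num_zero_bits += !bit;
	e->recent_bits = (e->recent_bits << 1) | (unsigned)bit;
}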