X-Git-Url: https://wimlib.net/git/?a=blobdiff_plain;f=src%2Flzms_decompress.c;h=4dd36627b37957faae16a14f639f27bf313e9f20;hb=a9b5ef0483d60ef1d8bf6014f223dfeaa68c091e;hp=d5d51417352b2ff5b81a8da2fdaaa27ca6e886ed;hpb=723d5dbc1705200082f640453f19233a386bc655;p=wimlib

diff --git a/src/lzms_decompress.c b/src/lzms_decompress.c
index d5d51417..4dd36627 100644
--- a/src/lzms_decompress.c
+++ b/src/lzms_decompress.c
@@ -5,7 +5,7 @@
  */
 
 /*
- * Copyright (C) 2013, 2014, 2015 Eric Biggers
+ * Copyright (C) 2013-2016 Eric Biggers
  *
  * This file is free software; you can redistribute it and/or modify it under
  * the terms of the GNU Lesser General Public License as published by the Free
@@ -257,10 +257,10 @@
 
 /* The TABLEBITS values can be changed; they only affect decoding speed. */
 #define LZMS_LITERAL_TABLEBITS		10
-#define LZMS_LENGTH_TABLEBITS		10
-#define LZMS_LZ_OFFSET_TABLEBITS	10
-#define LZMS_DELTA_OFFSET_TABLEBITS	10
-#define LZMS_DELTA_POWER_TABLEBITS	8
+#define LZMS_LENGTH_TABLEBITS		9
+#define LZMS_LZ_OFFSET_TABLEBITS	11
+#define LZMS_DELTA_OFFSET_TABLEBITS	11
+#define LZMS_DELTA_POWER_TABLEBITS	7
 
 struct lzms_range_decoder {
@@ -276,10 +276,10 @@ struct lzms_range_decoder {
 
 	/* Pointer to the next little-endian 16-bit integer in the compressed
 	 * input data (reading forwards). */
-	const le16 *next;
+	const u8 *next;
 
 	/* Pointer to the end of the compressed input data. */
-	const le16 *end;
+	const u8 *end;
 };
 
 typedef u64 bitbuf_t;
@@ -295,10 +295,10 @@ struct lzms_input_bitstream {
 
 	/* Pointer to the one past the next little-endian 16-bit integer in the
 	 * compressed input data (reading backwards). */
-	const le16 *next;
+	const u8 *next;
 
 	/* Pointer to the beginning of the compressed input data. */
-	const le16 *begin;
+	const u8 *begin;
 };
 
 #define BITBUF_NBITS	(8 * sizeof(bitbuf_t))
@@ -323,37 +323,37 @@ struct lzms_decompressor {
 
 	struct lzms_probabilites probs;
 
-	u16 literal_decode_table[(1 << LZMS_LITERAL_TABLEBITS) +
-				 (2 * LZMS_NUM_LITERAL_SYMS)]
-		_aligned_attribute(DECODE_TABLE_ALIGNMENT);
+	DECODE_TABLE(literal_decode_table, LZMS_NUM_LITERAL_SYMS,
+		     LZMS_LITERAL_TABLEBITS, LZMS_MAX_CODEWORD_LENGTH);
 	u32 literal_freqs[LZMS_NUM_LITERAL_SYMS];
 	struct lzms_huffman_rebuild_info literal_rebuild_info;
 
-	u16 lz_offset_decode_table[(1 << LZMS_LZ_OFFSET_TABLEBITS) +
-				   (2 * LZMS_MAX_NUM_OFFSET_SYMS)]
-		_aligned_attribute(DECODE_TABLE_ALIGNMENT);
+	DECODE_TABLE(lz_offset_decode_table, LZMS_MAX_NUM_OFFSET_SYMS,
+		     LZMS_LZ_OFFSET_TABLEBITS, LZMS_MAX_CODEWORD_LENGTH);
 	u32 lz_offset_freqs[LZMS_MAX_NUM_OFFSET_SYMS];
 	struct lzms_huffman_rebuild_info lz_offset_rebuild_info;
 
-	u16 length_decode_table[(1 << LZMS_LENGTH_TABLEBITS) +
-				(2 * LZMS_NUM_LENGTH_SYMS)]
-		_aligned_attribute(DECODE_TABLE_ALIGNMENT);
+	DECODE_TABLE(length_decode_table, LZMS_NUM_LENGTH_SYMS,
+		     LZMS_LENGTH_TABLEBITS, LZMS_MAX_CODEWORD_LENGTH);
 	u32 length_freqs[LZMS_NUM_LENGTH_SYMS];
 	struct lzms_huffman_rebuild_info length_rebuild_info;
 
-	u16 delta_offset_decode_table[(1 << LZMS_DELTA_OFFSET_TABLEBITS) +
-				      (2 * LZMS_MAX_NUM_OFFSET_SYMS)]
-		_aligned_attribute(DECODE_TABLE_ALIGNMENT);
+	DECODE_TABLE(delta_offset_decode_table, LZMS_MAX_NUM_OFFSET_SYMS,
+		     LZMS_DELTA_OFFSET_TABLEBITS, LZMS_MAX_CODEWORD_LENGTH);
 	u32 delta_offset_freqs[LZMS_MAX_NUM_OFFSET_SYMS];
 	struct lzms_huffman_rebuild_info delta_offset_rebuild_info;
 
-	u16 delta_power_decode_table[(1 << LZMS_DELTA_POWER_TABLEBITS) +
-				     (2 * LZMS_NUM_DELTA_POWER_SYMS)]
-		_aligned_attribute(DECODE_TABLE_ALIGNMENT);
+	DECODE_TABLE(delta_power_decode_table, LZMS_NUM_DELTA_POWER_SYMS,
+		     LZMS_DELTA_POWER_TABLEBITS, LZMS_MAX_CODEWORD_LENGTH);
 	u32 delta_power_freqs[LZMS_NUM_DELTA_POWER_SYMS];
 	struct lzms_huffman_rebuild_info delta_power_rebuild_info;
 
-	u32 codewords[LZMS_MAX_NUM_SYMS];
+	/* Temporary space for lzms_build_huffman_code() */
+	union {
+		u32 codewords[LZMS_MAX_NUM_SYMS];
+		DECODE_TABLE_WORKING_SPACE(working_space, LZMS_MAX_NUM_SYMS,
+					   LZMS_MAX_CODEWORD_LENGTH);
+	};
 };
 
@@ -363,10 +363,10 @@ struct lzms_decompressor {
 };
 
 /* Initialize the input bitstream @is to read backwards from the compressed data
- * buffer @in that is @count 16-bit integers long. */
+ * buffer @in that is @count bytes long. */
 static void
 lzms_input_bitstream_init(struct lzms_input_bitstream *is,
-			  const le16 *in, size_t count)
+			  const u8 *in, size_t count)
 {
 	is->bitbuf = 0;
 	is->bitsleft = 0;
@@ -376,7 +376,7 @@ lzms_input_bitstream_init(struct lzms_input_bitstream *is,
 
 /* Ensure that at least @num_bits bits are in the bitbuffer variable.
  * @num_bits cannot be more than 32. */
-static inline void
+static forceinline void
 lzms_ensure_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 {
 	unsigned avail;
@@ -387,31 +387,35 @@ lzms_ensure_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 	avail = BITBUF_NBITS - is->bitsleft;
 
 	if (UNALIGNED_ACCESS_IS_FAST && CPU_IS_LITTLE_ENDIAN &&
-	    WORDSIZE == 8 && likely((u8 *)is->next - (u8 *)is->begin >= 8))
+	    WORDBYTES == 8 && likely(is->next - is->begin >= 8))
 	{
-		is->next -= avail >> 4;
+		is->next -= (avail & ~15) >> 3;
 		is->bitbuf |= load_u64_unaligned(is->next) << (avail & 15);
 		is->bitsleft += avail & ~15;
 	} else {
-		if (likely(is->next != is->begin))
-			is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
+		if (likely(is->next != is->begin)) {
+			is->next -= sizeof(le16);
+			is->bitbuf |= (bitbuf_t)get_unaligned_le16(is->next)
 				      << (avail - 16);
-		if (likely(is->next != is->begin))
-			is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
+		}
+		if (likely(is->next != is->begin)) {
+			is->next -= sizeof(le16);
+			is->bitbuf |= (bitbuf_t)get_unaligned_le16(is->next)
 				      << (avail - 32);
+		}
 		is->bitsleft += 32;
 	}
 }
 
 /* Get @num_bits bits from the bitbuffer variable. */
-static inline bitbuf_t
+static forceinline bitbuf_t
 lzms_peek_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 {
 	return (is->bitbuf >> 1) >> (BITBUF_NBITS - num_bits - 1);
 }
 
 /* Remove @num_bits bits from the bitbuffer variable. */
-static inline void
+static forceinline void
 lzms_remove_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 {
 	is->bitbuf <<= num_bits;
@@ -419,7 +423,7 @@ lzms_remove_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 }
 
 /* Remove and return @num_bits bits from the bitbuffer variable. */
-static inline bitbuf_t
+static forceinline bitbuf_t
 lzms_pop_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 {
 	bitbuf_t bits = lzms_peek_bits(is, num_bits);
@@ -428,7 +432,7 @@ lzms_pop_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 }
 
 /* Read @num_bits bits from the input bitstream. */
-static inline bitbuf_t
+static forceinline bitbuf_t
 lzms_read_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 {
 	lzms_ensure_bits(is, num_bits);
@@ -436,14 +440,15 @@ lzms_read_bits(struct lzms_input_bitstream *is, unsigned num_bits)
 }
 
 /* Initialize the range decoder @rd to read forwards from the compressed data
- * buffer @in that is @count 16-bit integers long. */
+ * buffer @in that is @count bytes long. */
 static void
 lzms_range_decoder_init(struct lzms_range_decoder *rd,
-			const le16 *in, size_t count)
+			const u8 *in, size_t count)
 {
 	rd->range = 0xffffffff;
-	rd->code = ((u32)le16_to_cpu(in[0]) << 16) | le16_to_cpu(in[1]);
-	rd->next = in + 2;
+	rd->code = ((u32)get_unaligned_le16(in) << 16) |
+		   get_unaligned_le16(in + 2);
+	rd->next = in + 4;
 	rd->end = in + count;
 }
 
@@ -452,7 +457,7 @@ lzms_range_decoder_init(struct lzms_range_decoder *rd,
  * probability entry to use. The state and probability entry will be updated
  * based on the decoded bit. */
-static inline int
+static forceinline int
 lzms_decode_bit(struct lzms_range_decoder *rd, u32 *state_p, u32 num_states,
 		struct lzms_probability_entry *probs)
 {
@@ -463,18 +468,24 @@ lzms_decode_bit(struct lzms_range_decoder *rd, u32 *state_p, u32 num_states,
 	/* Load the probability entry corresponding to the current state. */
 	prob_entry = &probs[*state_p];
 
-	/* Normalize if needed. */
-	if (rd->range <= 0xffff) {
-		rd->range <<= 16;
-		rd->code <<= 16;
-		if (likely(rd->next != rd->end))
-			rd->code |= le16_to_cpu(*rd->next++);
-	}
+	/* Update the state early. We'll still need to OR the state with 1
+	 * later if the decoded bit is a 1. */
+	*state_p = (*state_p << 1) & (num_states - 1);
 
 	/* Get the probability (out of LZMS_PROBABILITY_DENOMINATOR) that the
 	 * next bit is 0. */
 	prob = lzms_get_probability(prob_entry);
 
+	/* Normalize if needed. */
+	if (!(rd->range & 0xFFFF0000)) {
+		rd->range <<= 16;
+		rd->code <<= 16;
+		if (likely(rd->next != rd->end)) {
+			rd->code |= get_unaligned_le16(rd->next);
+			rd->next += sizeof(le16);
+		}
+	}
+
 	/* Based on the probability, calculate the bound between the 0-bit
 	 * region and the 1-bit region of the range. */
 	bound = (rd->range >> LZMS_PROBABILITY_BITS) * prob;
@@ -484,7 +495,6 @@ lzms_decode_bit(struct lzms_range_decoder *rd, u32 *state_p, u32 num_states,
 		rd->range = bound;
 
 		/* Update the state and probability entry based on the decoded bit. */
-		*state_p = ((*state_p << 1) | 0) & (num_states - 1);
 		lzms_update_probability_entry(prob_entry, 0);
 		return 0;
 	} else {
@@ -493,8 +503,8 @@ lzms_decode_bit(struct lzms_range_decoder *rd, u32 *state_p, u32 num_states,
 		rd->code -= bound;
 
 		/* Update the state and probability entry based on the decoded bit. */
-		*state_p = ((*state_p << 1) | 1) & (num_states - 1);
 		lzms_update_probability_entry(prob_entry, 1);
+		*state_p |= 1;
 		return 1;
 	}
 }
@@ -512,7 +522,8 @@ lzms_build_huffman_code(struct lzms_huffman_rebuild_info *rebuild_info)
 					  rebuild_info->num_syms,
 					  rebuild_info->table_bits,
 					  (u8 *)rebuild_info->decode_table,
-					  LZMS_MAX_CODEWORD_LENGTH);
+					  LZMS_MAX_CODEWORD_LENGTH,
+					  (u16 *)rebuild_info->codewords);
 
 	rebuild_info->num_syms_until_rebuild = rebuild_info->rebuild_freq;
 }
@@ -584,46 +595,39 @@ lzms_rebuild_huffman_code(struct lzms_huffman_rebuild_info *rebuild_info)
 	lzms_dilute_symbol_frequencies(rebuild_info->freqs, rebuild_info->num_syms);
 }
 
-static inline unsigned
+/* XXX: mostly copied from read_huffsym() in decompress_common.h because LZMS
+ * needs its own bitstream */
+static forceinline unsigned
 lzms_decode_huffman_symbol(struct lzms_input_bitstream *is, u16 decode_table[],
 			   unsigned table_bits, u32 freqs[],
 			   struct lzms_huffman_rebuild_info *rebuild_info)
 {
-	unsigned key_bits;
 	unsigned entry;
-	unsigned sym;
+	unsigned symbol;
+	unsigned length;
 
 	lzms_ensure_bits(is, LZMS_MAX_CODEWORD_LENGTH);
 
-	/* Index the decode table by the next table_bits bits of the input. */
-	key_bits = lzms_peek_bits(is, table_bits);
-	entry = decode_table[key_bits];
-	if (likely(entry < 0xC000)) {
-		/* Fast case: The decode table directly provided the symbol and
-		 * codeword length. The low 11 bits are the symbol, and the
-		 * high 5 bits are the codeword length. */
-		lzms_remove_bits(is, entry >> 11);
-		sym = entry & 0x7FF;
-	} else {
-		/* Slow case: The codeword for the symbol is longer than
-		 * table_bits, so the symbol does not have an entry directly in
-		 * the first (1 << table_bits) entries of the decode table.
-		 * Traverse the appropriate binary tree bit-by-bit in order to
-		 * decode the symbol. */
+	entry = decode_table[lzms_peek_bits(is, table_bits)];
+	symbol = entry >> DECODE_TABLE_SYMBOL_SHIFT;
+	length = entry & DECODE_TABLE_LENGTH_MASK;
+
+	if (entry >= (1U << (table_bits + DECODE_TABLE_SYMBOL_SHIFT))) {
 		lzms_remove_bits(is, table_bits);
-		do {
-			key_bits = (entry & 0x3FFF) + lzms_pop_bits(is, 1);
-		} while ((entry = decode_table[key_bits]) >= 0xC000);
-		sym = entry;
+		entry = decode_table[symbol + lzms_peek_bits(is, length)];
+		symbol = entry >> DECODE_TABLE_SYMBOL_SHIFT;
+		length = entry & DECODE_TABLE_LENGTH_MASK;
 	}
 
-	freqs[sym]++;
+	lzms_remove_bits(is, length);
+
+	freqs[symbol]++;
 
 	if (--rebuild_info->num_syms_until_rebuild == 0)
 		lzms_rebuild_huffman_code(rebuild_info);
 
-	return sym;
+	return symbol;
 }
 
-static inline unsigned
+static forceinline unsigned
 lzms_decode_literal(struct lzms_decompressor *d,
 		    struct lzms_input_bitstream *is)
 {
@@ -634,7 +638,7 @@ lzms_decode_literal(struct lzms_decompressor *d,
 			&d->literal_rebuild_info);
 }
 
-static inline u32
+static forceinline u32
 lzms_decode_lz_offset(struct lzms_decompressor *d,
 		      struct lzms_input_bitstream *is)
 {
@@ -647,7 +651,7 @@ lzms_decode_lz_offset(struct lzms_decompressor *d,
 		lzms_read_bits(is, lzms_extra_offset_bits[slot]);
 }
 
-static inline u32
+static forceinline u32
 lzms_decode_length(struct lzms_decompressor *d,
 		   struct lzms_input_bitstream *is)
 {
@@ -664,7 +668,7 @@ lzms_decode_length(struct lzms_decompressor *d,
 	return length;
 }
 
-static inline u32
+static forceinline u32
 lzms_decode_delta_offset(struct lzms_decompressor *d,
 			 struct lzms_input_bitstream *is)
 {
@@ -677,7 +681,7 @@ lzms_decode_delta_offset(struct lzms_decompressor *d,
 		lzms_read_bits(is, lzms_extra_offset_bits[slot]);
 }
 
-static inline unsigned
+static forceinline unsigned
 lzms_decode_delta_power(struct lzms_decompressor *d,
 			struct lzms_input_bitstream *is)
 {
@@ -724,10 +728,14 @@ lzms_decompress(const void * const restrict in, const size_t in_nbytes,
 	/* LRU queues for match sources */
 	u32 recent_lz_offsets[LZMS_NUM_LZ_REPS + 1];
 	u64 recent_delta_pairs[LZMS_NUM_DELTA_REPS + 1];
-	u32 pending_lz_offset = 0;
-	u64 pending_delta_pair = 0;
-	const u8 *lz_offset_still_pending;
-	const u8 *delta_pair_still_pending;
+
+	/* Previous item type: 0 = literal, 1 = LZ match, 2 = delta match.
+	 * This is used to handle delayed updates of the LRU queues. Instead of
+	 * actually delaying the updates, we can check when decoding each rep
+	 * match whether a delayed update needs to be taken into account, and if
+	 * so get the match source from slot 'rep_idx + 1' instead of from slot
+	 * 'rep_idx'. */
+	unsigned prev_item_type = 0;
 
 	/* States and probability entries for item type disambiguation */
 	u32 main_state = 0;
@@ -742,17 +750,15 @@ lzms_decompress(const void * const restrict in, const size_t in_nbytes,
 	 *
 	 * 1. LZMS-compressed data is a series of 16-bit integers, so the
 	 *    compressed data buffer cannot take up an odd number of bytes.
-	 * 2. To prevent poor performance on some architectures, we require that
-	 *    the compressed data buffer is 2-byte aligned.
-	 * 3. There must be at least 4 bytes of compressed data, since otherwise
+	 * 2. There must be at least 4 bytes of compressed data, since otherwise
 	 *    we cannot even initialize the range decoder. */
-	if ((in_nbytes & 1) || ((uintptr_t)in & 1) || (in_nbytes < 4))
+	if ((in_nbytes & 1) || (in_nbytes < 4))
 		return -1;
 
-	lzms_range_decoder_init(&rd, in, in_nbytes / sizeof(le16));
+	lzms_range_decoder_init(&rd, in, in_nbytes);
 
-	lzms_input_bitstream_init(&is, in, in_nbytes / sizeof(le16));
+	lzms_input_bitstream_init(&is, in, in_nbytes);
 
 	lzms_init_probabilities(&d->probs);
 
@@ -772,6 +778,7 @@ lzms_decompress(const void * const restrict in, const size_t in_nbytes,
 		{
 			/* Literal */
 			*out_next++ = lzms_decode_literal(d, &is);
+			prev_item_type = 0;
 
 		} else if (!lzms_decode_bit(&rd, &match_state,
 					    LZMS_NUM_MATCH_PROBS,
@@ -782,67 +789,50 @@ lzms_decompress(const void * const restrict in, const size_t in_nbytes,
 			u32 offset;
 			u32 length;
 
+			STATIC_ASSERT(LZMS_NUM_LZ_REPS == 3);
+
 			if (!lzms_decode_bit(&rd, &lz_state,
 					     LZMS_NUM_LZ_PROBS, d->probs.lz))
 			{
 				/* Explicit offset */
 				offset = lzms_decode_lz_offset(d, &is);
+
+				recent_lz_offsets[3] = recent_lz_offsets[2];
+				recent_lz_offsets[2] = recent_lz_offsets[1];
+				recent_lz_offsets[1] = recent_lz_offsets[0];
 			} else {
 				/* Repeat offset */
 
-				if (pending_lz_offset != 0 &&
-				    out_next != lz_offset_still_pending)
-				{
-					BUILD_BUG_ON(LZMS_NUM_LZ_REPS != 3);
-					recent_lz_offsets[3] = recent_lz_offsets[2];
-					recent_lz_offsets[2] = recent_lz_offsets[1];
-					recent_lz_offsets[1] = recent_lz_offsets[0];
-					recent_lz_offsets[0] = pending_lz_offset;
-					pending_lz_offset = 0;
-				}
-
-				BUILD_BUG_ON(LZMS_NUM_LZ_REPS != 3);
 				if (!lzms_decode_bit(&rd, &lz_rep_states[0],
 						     LZMS_NUM_LZ_REP_PROBS,
 						     d->probs.lz_rep[0]))
 				{
-					offset = recent_lz_offsets[0];
-					recent_lz_offsets[0] = recent_lz_offsets[1];
-					recent_lz_offsets[1] = recent_lz_offsets[2];
-					recent_lz_offsets[2] = recent_lz_offsets[3];
+					offset = recent_lz_offsets[0 + (prev_item_type & 1)];
+					recent_lz_offsets[0 + (prev_item_type & 1)] = recent_lz_offsets[0];
 				} else if (!lzms_decode_bit(&rd, &lz_rep_states[1],
 							    LZMS_NUM_LZ_REP_PROBS,
 							    d->probs.lz_rep[1]))
 				{
-					offset = recent_lz_offsets[1];
-					recent_lz_offsets[1] = recent_lz_offsets[2];
-					recent_lz_offsets[2] = recent_lz_offsets[3];
+					offset = recent_lz_offsets[1 + (prev_item_type & 1)];
+					recent_lz_offsets[1 + (prev_item_type & 1)] = recent_lz_offsets[1];
+					recent_lz_offsets[1] = recent_lz_offsets[0];
 				} else {
-					offset = recent_lz_offsets[2];
-					recent_lz_offsets[2] = recent_lz_offsets[3];
+					offset = recent_lz_offsets[2 + (prev_item_type & 1)];
+					recent_lz_offsets[2 + (prev_item_type & 1)] = recent_lz_offsets[2];
+					recent_lz_offsets[2] = recent_lz_offsets[1];
+					recent_lz_offsets[1] = recent_lz_offsets[0];
 				}
 			}
-
-			if (pending_lz_offset != 0) {
-				BUILD_BUG_ON(LZMS_NUM_LZ_REPS != 3);
-				recent_lz_offsets[3] = recent_lz_offsets[2];
-				recent_lz_offsets[2] = recent_lz_offsets[1];
-				recent_lz_offsets[1] = recent_lz_offsets[0];
-				recent_lz_offsets[0] = pending_lz_offset;
-			}
-			pending_lz_offset = offset;
+			recent_lz_offsets[0] = offset;
+			prev_item_type = 1;
 
 			length = lzms_decode_length(d, &is);
 
-			if (unlikely(length > out_end - out_next))
-				return -1;
-			if (unlikely(offset > out_next - (u8 *)out))
+			if (unlikely(lz_copy(length, offset, out, out_next, out_end,
+					     LZMS_MIN_MATCH_LENGTH)))
 				return -1;
-			lz_copy(out_next, length, offset, out_end, LZMS_MIN_MATCH_LENGTH);
 			out_next += length;
-
-			lz_offset_still_pending = out_next;
 		} else {
 			/* Delta match */
 
@@ -854,6 +844,9 @@ lzms_decompress(const void * const restrict in, const size_t in_nbytes,
 			u32 offset;
 			const u8 *matchptr;
 			u32 length;
+			u64 pair;
+
+			STATIC_ASSERT(LZMS_NUM_DELTA_REPS == 3);
 
 			if (!lzms_decode_bit(&rd, &delta_state,
 					     LZMS_NUM_DELTA_PROBS,
@@ -862,53 +855,37 @@ lzms_decompress(const void * const restrict in, const size_t in_nbytes,
 				/* Explicit offset */
 				power = lzms_decode_delta_power(d, &is);
 				raw_offset = lzms_decode_delta_offset(d, &is);
-			} else {
-				/* Repeat offset */
-				u64 val;
-				if (pending_delta_pair != 0 &&
-				    out_next != delta_pair_still_pending)
-				{
-					BUILD_BUG_ON(LZMS_NUM_DELTA_REPS != 3);
-					recent_delta_pairs[3] = recent_delta_pairs[2];
-					recent_delta_pairs[2] = recent_delta_pairs[1];
-					recent_delta_pairs[1] = recent_delta_pairs[0];
-					recent_delta_pairs[0] = pending_delta_pair;
-					pending_delta_pair = 0;
-				}
-
-				BUILD_BUG_ON(LZMS_NUM_DELTA_REPS != 3);
+				pair = ((u64)power << 32) | raw_offset;
+				recent_delta_pairs[3] = recent_delta_pairs[2];
+				recent_delta_pairs[2] = recent_delta_pairs[1];
+				recent_delta_pairs[1] = recent_delta_pairs[0];
+			} else {
 				if (!lzms_decode_bit(&rd, &delta_rep_states[0],
 						     LZMS_NUM_DELTA_REP_PROBS,
 						     d->probs.delta_rep[0]))
 				{
-					val = recent_delta_pairs[0];
-					recent_delta_pairs[0] = recent_delta_pairs[1];
-					recent_delta_pairs[1] = recent_delta_pairs[2];
-					recent_delta_pairs[2] = recent_delta_pairs[3];
+					pair = recent_delta_pairs[0 + (prev_item_type >> 1)];
+					recent_delta_pairs[0 + (prev_item_type >> 1)] = recent_delta_pairs[0];
 				} else if (!lzms_decode_bit(&rd, &delta_rep_states[1],
 							    LZMS_NUM_DELTA_REP_PROBS,
 							    d->probs.delta_rep[1]))
 				{
-					val = recent_delta_pairs[1];
-					recent_delta_pairs[1] = recent_delta_pairs[2];
-					recent_delta_pairs[2] = recent_delta_pairs[3];
+					pair = recent_delta_pairs[1 + (prev_item_type >> 1)];
+					recent_delta_pairs[1 + (prev_item_type >> 1)] = recent_delta_pairs[1];
+					recent_delta_pairs[1] = recent_delta_pairs[0];
 				} else {
-					val = recent_delta_pairs[2];
-					recent_delta_pairs[2] = recent_delta_pairs[3];
+					pair = recent_delta_pairs[2 + (prev_item_type >> 1)];
+					recent_delta_pairs[2 + (prev_item_type >> 1)] = recent_delta_pairs[2];
+					recent_delta_pairs[2] = recent_delta_pairs[1];
+					recent_delta_pairs[1] = recent_delta_pairs[0];
 				}
-				power = val >> 32;
-				raw_offset = (u32)val;
-			}
 
-			if (pending_delta_pair != 0) {
-				BUILD_BUG_ON(LZMS_NUM_DELTA_REPS != 3);
-				recent_delta_pairs[3] = recent_delta_pairs[2];
-				recent_delta_pairs[2] = recent_delta_pairs[1];
-				recent_delta_pairs[1] = recent_delta_pairs[0];
-				recent_delta_pairs[0] = pending_delta_pair;
+				power = pair >> 32;
+				raw_offset = (u32)pair;
 			}
-			pending_delta_pair = raw_offset | ((u64)power << 32);
+			recent_delta_pairs[0] = pair;
+			prev_item_type = 2;
 
 			length = lzms_decode_length(d, &is);
 
@@ -938,8 +915,6 @@ lzms_decompress(const void * const restrict in, const size_t in_nbytes,
 				out_next++;
 				matchptr++;
 			} while (--length);
-
-			delta_pair_still_pending = out_next;
 		}
 	}
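
As a supplementary illustration of the range-decoding step that the lzms_decode_bit() hunks above describe (split the range into a 0-bit region and a 1-bit region with bound = (range >> PROBABILITY_BITS) * prob, and renormalize by shifting in the next 16 bits of input whenever the upper 16 bits of the range become zero), here is a minimal standalone sketch. It is not wimlib code: the names toy_rc, toy_load_le16 and toy_rc_decode_bit, the 6-bit probability denominator, and the omission of the adaptive state/probability updates are assumptions made here for brevity.

/* Illustrative sketch only -- not wimlib's implementation. */
#include <stdint.h>

#define TOY_PROB_BITS	6
#define TOY_PROB_DENOM	(1 << TOY_PROB_BITS)

struct toy_rc {
	uint32_t range;		/* current width of the coding interval */
	uint32_t code;		/* current position within that interval */
	const uint8_t *next;	/* next 16-bit chunk of compressed input */
	const uint8_t *end;	/* end of compressed input */
};

static uint32_t toy_load_le16(const uint8_t *p)
{
	return (uint32_t)p[0] | ((uint32_t)p[1] << 8);
}

/* Decode one bit whose probability of being 0 is prob / TOY_PROB_DENOM. */
static int toy_rc_decode_bit(struct toy_rc *rc, uint32_t prob)
{
	uint32_t bound;

	/* Renormalize: when the top 16 bits of the range are zero, shift in
	 * the next 16 bits of compressed data (this mirrors the patch's
	 * "if (!(rd->range & 0xFFFF0000))" check). */
	if (!(rc->range & 0xFFFF0000)) {
		rc->range <<= 16;
		rc->code <<= 16;
		if (rc->next != rc->end) {
			rc->code |= toy_load_le16(rc->next);
			rc->next += 2;
		}
	}

	/* Split the range into a 0-bit region [0, bound) and a 1-bit region
	 * [bound, range), proportionally to the probability of a 0 bit. */
	bound = (rc->range >> TOY_PROB_BITS) * prob;

	if (rc->code < bound) {
		rc->range = bound;	/* keep the 0-bit region */
		return 0;
	} else {
		rc->range -= bound;	/* keep the 1-bit region */
		rc->code -= bound;
		return 1;
	}
}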
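The other behavioral change above replaces the pending_lz_offset / pending_delta_pair bookkeeping with the prev_item_type slot adjustment: when the previous item was a match whose LRU-queue update is still delayed, a repeat match with index rep_idx reads its source from slot rep_idx + 1 instead. The sketch below generalizes the patch's per-case assignments for a 3-entry recent-offsets queue; the names (toy_use_rep_offset, TOY_NUM_REPS, 'delayed') are invented here and it is not the patch's code.

/* Illustrative sketch only -- not wimlib's implementation. */
#include <stdint.h>

#define TOY_NUM_REPS 3

/* Fetch the repeat offset with index rep_idx (0 = most recent) from a queue
 * of TOY_NUM_REPS + 1 entries.  'delayed' is 1 if the previous item was a
 * match whose queue update is conceptually still pending -- the situation
 * the patch tracks with prev_item_type. */
static uint32_t
toy_use_rep_offset(uint32_t recent[TOY_NUM_REPS + 1], unsigned rep_idx,
		   unsigned delayed)
{
	uint32_t offset = recent[rep_idx + delayed];

	/* Rotate: the chosen source moves to the front of the queue, and
	 * every more recent entry shifts down by one slot. */
	recent[rep_idx + delayed] = recent[rep_idx];
	for (unsigned i = rep_idx; i > 0; i--)
		recent[i] = recent[i - 1];
	recent[0] = offset;
	return offset;
}

With delayed == 0 this is the ordinary move-to-front update; with delayed == 1 it compensates for the fact that the previous match's offset was already pushed eagerly onto the front of the queue, so rep index i still refers to the offset that occupied slot i before that push.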