/* Pointer to the next little-endian 16-bit integer in the compressed
* input data (reading forwards). */
- const le16 *next;
+ const u8 *next;
/* Pointer to the end of the compressed input data. */
- const le16 *end;
+ const u8 *end;
};
typedef u64 bitbuf_t;
/* Pointer to the one past the next little-endian 16-bit integer in the
* compressed input data (reading backwards). */
- const le16 *next;
+ const u8 *next;
/* Pointer to the beginning of the compressed input data. */
- const le16 *begin;
+ const u8 *begin;
};
#define BITBUF_NBITS (8 * sizeof(bitbuf_t))
};
/* Initialize the input bitstream @is to read backwards from the compressed data
- * buffer @in that is @count 16-bit integers long. */
+ * buffer @in that is @count bytes long. */
static void
lzms_input_bitstream_init(struct lzms_input_bitstream *is,
- const le16 *in, size_t count)
+ const u8 *in, size_t count)
{
is->bitbuf = 0;
is->bitsleft = 0;
avail = BITBUF_NBITS - is->bitsleft;
if (UNALIGNED_ACCESS_IS_FAST && CPU_IS_LITTLE_ENDIAN &&
- WORDSIZE == 8 && likely((u8 *)is->next - (u8 *)is->begin >= 8))
+ WORDSIZE == 8 && likely(is->next - is->begin >= 8))
{
- is->next -= avail >> 4;
+ is->next -= (avail & ~15) >> 3;
is->bitbuf |= load_u64_unaligned(is->next) << (avail & 15);
is->bitsleft += avail & ~15;
} else {
- if (likely(is->next != is->begin))
- is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
+ if (likely(is->next != is->begin)) {
+ is->next -= sizeof(le16);
+ is->bitbuf |= (bitbuf_t)get_unaligned_le16(is->next)
<< (avail - 16);
- if (likely(is->next != is->begin))
- is->bitbuf |=(bitbuf_t)le16_to_cpu(*--is->next)
+ }
+ if (likely(is->next != is->begin)) {
+ is->next -= sizeof(le16);
+ is->bitbuf |= (bitbuf_t)get_unaligned_le16(is->next)
<< (avail - 32);
+ }
is->bitsleft += 32;
}
}
}
/* Initialize the range decoder @rd to read forwards from the compressed data
- * buffer @in that is @count 16-bit integers long. */
+ * buffer @in that is @count bytes long. */
static void
lzms_range_decoder_init(struct lzms_range_decoder *rd,
- const le16 *in, size_t count)
+ const u8 *in, size_t count)
{
rd->range = 0xffffffff;
- rd->code = ((u32)le16_to_cpu(in[0]) << 16) | le16_to_cpu(in[1]);
- rd->next = in + 2;
+ /* Seed the 32-bit range code with the first two 16-bit values, so the
+ * byte-based read pointer starts 4 bytes in.  Unaligned loads are used
+ * because @in is no longer required to be 2-byte aligned. */
+ rd->code = ((u32)get_unaligned_le16(in) << 16) |
+ get_unaligned_le16(in + 2);
+ rd->next = in + 4;
rd->end = in + count;
}
if (!(rd->range & 0xFFFF0000)) {
rd->range <<= 16;
rd->code <<= 16;
- if (likely(rd->next != rd->end))
- rd->code |= le16_to_cpu(*rd->next++);
+ if (likely(rd->next != rd->end)) {
+ rd->code |= get_unaligned_le16(rd->next);
+ rd->next += sizeof(le16);
+ }
}
/* Based on the probability, calculate the bound between the 0-bit
/* LRU queues for match sources */
u32 recent_lz_offsets[LZMS_NUM_LZ_REPS + 1];
u64 recent_delta_pairs[LZMS_NUM_DELTA_REPS + 1];
- u32 pending_lz_offset = 0;
- u64 pending_delta_pair = 0;
- const u8 *lz_offset_still_pending;
- const u8 *delta_pair_still_pending;
+
+ /* Previous item type: 0 = literal, 1 = LZ match, 2 = delta match.
+ * This is used to handle delayed updates of the LRU queues.  Instead of
+ * actually delaying the updates, each new match source is inserted into
+ * its queue immediately, and we compensate when decoding the next rep
+ * match: if the previous item was a match of the same type (LZ or
+ * delta), its source still occupies slot 0, so the match source is
+ * taken from slot 'rep_idx + 1' instead of from slot 'rep_idx'. */
+ unsigned prev_item_type = 0;
/* States and probability entries for item type disambiguation */
u32 main_state = 0;
*
* 1. LZMS-compressed data is a series of 16-bit integers, so the
* compressed data buffer cannot take up an odd number of bytes.
- * 2. To prevent poor performance on some architectures, we require that
- * the compressed data buffer is 2-byte aligned.
- * 3. There must be at least 4 bytes of compressed data, since otherwise
+ * 2. There must be at least 4 bytes of compressed data, since otherwise
* we cannot even initialize the range decoder.
*/
- if ((in_nbytes & 1) || ((uintptr_t)in & 1) || (in_nbytes < 4))
+ if ((in_nbytes & 1) || (in_nbytes < 4))
return -1;
- lzms_range_decoder_init(&rd, in, in_nbytes / sizeof(le16));
+ lzms_range_decoder_init(&rd, in, in_nbytes);
- lzms_input_bitstream_init(&is, in, in_nbytes / sizeof(le16));
+ lzms_input_bitstream_init(&is, in, in_nbytes);
lzms_init_probabilities(&d->probs);
{
/* Literal */
*out_next++ = lzms_decode_literal(d, &is);
+ prev_item_type = 0;
} else if (!lzms_decode_bit(&rd, &match_state,
LZMS_NUM_MATCH_PROBS,
u32 offset;
u32 length;
+ STATIC_ASSERT(LZMS_NUM_LZ_REPS == 3);
+
if (!lzms_decode_bit(&rd, &lz_state,
LZMS_NUM_LZ_PROBS, d->probs.lz))
{
/* Explicit offset */
offset = lzms_decode_lz_offset(d, &is);
+
+ recent_lz_offsets[3] = recent_lz_offsets[2];
+ recent_lz_offsets[2] = recent_lz_offsets[1];
+ recent_lz_offsets[1] = recent_lz_offsets[0];
} else {
/* Repeat offset */
- if (pending_lz_offset != 0 &&
- out_next != lz_offset_still_pending)
- {
- STATIC_ASSERT(LZMS_NUM_LZ_REPS == 3);
- recent_lz_offsets[3] = recent_lz_offsets[2];
- recent_lz_offsets[2] = recent_lz_offsets[1];
- recent_lz_offsets[1] = recent_lz_offsets[0];
- recent_lz_offsets[0] = pending_lz_offset;
- pending_lz_offset = 0;
- }
-
- STATIC_ASSERT(LZMS_NUM_LZ_REPS == 3);
if (!lzms_decode_bit(&rd, &lz_rep_states[0],
LZMS_NUM_LZ_REP_PROBS,
d->probs.lz_rep[0]))
{
- offset = recent_lz_offsets[0];
- recent_lz_offsets[0] = recent_lz_offsets[1];
- recent_lz_offsets[1] = recent_lz_offsets[2];
- recent_lz_offsets[2] = recent_lz_offsets[3];
+ offset = recent_lz_offsets[0 + (prev_item_type & 1)];
+ recent_lz_offsets[0 + (prev_item_type & 1)] = recent_lz_offsets[0];
} else if (!lzms_decode_bit(&rd, &lz_rep_states[1],
LZMS_NUM_LZ_REP_PROBS,
d->probs.lz_rep[1]))
{
- offset = recent_lz_offsets[1];
- recent_lz_offsets[1] = recent_lz_offsets[2];
- recent_lz_offsets[2] = recent_lz_offsets[3];
+ offset = recent_lz_offsets[1 + (prev_item_type & 1)];
+ recent_lz_offsets[1 + (prev_item_type & 1)] = recent_lz_offsets[1];
+ recent_lz_offsets[1] = recent_lz_offsets[0];
} else {
- offset = recent_lz_offsets[2];
- recent_lz_offsets[2] = recent_lz_offsets[3];
+ offset = recent_lz_offsets[2 + (prev_item_type & 1)];
+ recent_lz_offsets[2 + (prev_item_type & 1)] = recent_lz_offsets[2];
+ recent_lz_offsets[2] = recent_lz_offsets[1];
+ recent_lz_offsets[1] = recent_lz_offsets[0];
}
}
-
- if (pending_lz_offset != 0) {
- STATIC_ASSERT(LZMS_NUM_LZ_REPS == 3);
- recent_lz_offsets[3] = recent_lz_offsets[2];
- recent_lz_offsets[2] = recent_lz_offsets[1];
- recent_lz_offsets[1] = recent_lz_offsets[0];
- recent_lz_offsets[0] = pending_lz_offset;
- }
- pending_lz_offset = offset;
+ recent_lz_offsets[0] = offset;
+ prev_item_type = 1;
length = lzms_decode_length(d, &is);
lz_copy(out_next, length, offset, out_end, LZMS_MIN_MATCH_LENGTH);
out_next += length;
-
- lz_offset_still_pending = out_next;
} else {
/* Delta match */
u32 offset;
const u8 *matchptr;
u32 length;
+ u64 pair;
+
+ STATIC_ASSERT(LZMS_NUM_DELTA_REPS == 3);
if (!lzms_decode_bit(&rd, &delta_state,
LZMS_NUM_DELTA_PROBS,
/* Explicit offset */
power = lzms_decode_delta_power(d, &is);
raw_offset = lzms_decode_delta_offset(d, &is);
- } else {
- /* Repeat offset */
- u64 val;
- if (pending_delta_pair != 0 &&
- out_next != delta_pair_still_pending)
- {
- STATIC_ASSERT(LZMS_NUM_DELTA_REPS == 3);
- recent_delta_pairs[3] = recent_delta_pairs[2];
- recent_delta_pairs[2] = recent_delta_pairs[1];
- recent_delta_pairs[1] = recent_delta_pairs[0];
- recent_delta_pairs[0] = pending_delta_pair;
- pending_delta_pair = 0;
- }
-
- STATIC_ASSERT(LZMS_NUM_DELTA_REPS == 3);
+ pair = ((u64)power << 32) | raw_offset;
+ recent_delta_pairs[3] = recent_delta_pairs[2];
+ recent_delta_pairs[2] = recent_delta_pairs[1];
+ recent_delta_pairs[1] = recent_delta_pairs[0];
+ } else {
if (!lzms_decode_bit(&rd, &delta_rep_states[0],
LZMS_NUM_DELTA_REP_PROBS,
d->probs.delta_rep[0]))
{
- val = recent_delta_pairs[0];
- recent_delta_pairs[0] = recent_delta_pairs[1];
- recent_delta_pairs[1] = recent_delta_pairs[2];
- recent_delta_pairs[2] = recent_delta_pairs[3];
+ pair = recent_delta_pairs[0 + (prev_item_type >> 1)];
+ recent_delta_pairs[0 + (prev_item_type >> 1)] = recent_delta_pairs[0];
} else if (!lzms_decode_bit(&rd, &delta_rep_states[1],
LZMS_NUM_DELTA_REP_PROBS,
d->probs.delta_rep[1]))
{
- val = recent_delta_pairs[1];
- recent_delta_pairs[1] = recent_delta_pairs[2];
- recent_delta_pairs[2] = recent_delta_pairs[3];
+ pair = recent_delta_pairs[1 + (prev_item_type >> 1)];
+ recent_delta_pairs[1 + (prev_item_type >> 1)] = recent_delta_pairs[1];
+ recent_delta_pairs[1] = recent_delta_pairs[0];
} else {
- val = recent_delta_pairs[2];
- recent_delta_pairs[2] = recent_delta_pairs[3];
+ pair = recent_delta_pairs[2 + (prev_item_type >> 1)];
+ recent_delta_pairs[2 + (prev_item_type >> 1)] = recent_delta_pairs[2];
+ recent_delta_pairs[2] = recent_delta_pairs[1];
+ recent_delta_pairs[1] = recent_delta_pairs[0];
}
- power = val >> 32;
- raw_offset = (u32)val;
- }
- if (pending_delta_pair != 0) {
- STATIC_ASSERT(LZMS_NUM_DELTA_REPS == 3);
- recent_delta_pairs[3] = recent_delta_pairs[2];
- recent_delta_pairs[2] = recent_delta_pairs[1];
- recent_delta_pairs[1] = recent_delta_pairs[0];
- recent_delta_pairs[0] = pending_delta_pair;
+ power = pair >> 32;
+ raw_offset = (u32)pair;
}
- pending_delta_pair = raw_offset | ((u64)power << 32);
+ recent_delta_pairs[0] = pair;
+ prev_item_type = 2;
length = lzms_decode_length(d, &is);
out_next++;
matchptr++;
} while (--length);
-
- delta_pair_still_pending = out_next;
}
}