const struct lzx_codes *codes)
{
const struct lzx_sequence *seq = sequences;
- u32 ones_if_aligned = 0 - (block_type == LZX_BLOCKTYPE_ALIGNED);
+ unsigned min_aligned_offset_slot;
+
+ if (block_type == LZX_BLOCKTYPE_ALIGNED)
+ min_aligned_offset_slot = LZX_MIN_ALIGNED_OFFSET_SLOT;
+ else
+ min_aligned_offset_slot = LZX_MAX_OFFSET_SLOTS;
for (;;) {
/* Output the next sequence. */
extra_bits = adjusted_offset - (lzx_offset_slot_base[offset_slot] +
LZX_OFFSET_ADJUSTMENT);
- #define MAX_MATCH_BITS (MAIN_CODEWORD_LIMIT + LENGTH_CODEWORD_LIMIT + \
- 14 + ALIGNED_CODEWORD_LIMIT)
+ #define MAX_MATCH_BITS (MAIN_CODEWORD_LIMIT + \
+ LENGTH_CODEWORD_LIMIT + \
+ LZX_MAX_NUM_EXTRA_BITS - \
+ LZX_NUM_ALIGNED_OFFSET_BITS + \
+ ALIGNED_CODEWORD_LIMIT)
/* Verify optimization is enabled on 64-bit */
STATIC_ASSERT(WORDBITS < 64 || CAN_BUFFER(MAX_MATCH_BITS));
* at least 3 extra offset bits are required. All other
* extra offset bits are output verbatim. */
- if ((adjusted_offset & ones_if_aligned) >= 16) {
+ if (offset_slot >= min_aligned_offset_slot) {
lzx_add_bits(os, extra_bits >> LZX_NUM_ALIGNED_OFFSET_BITS,
num_extra_bits - LZX_NUM_ALIGNED_OFFSET_BITS);
if (!CAN_BUFFER(MAX_MATCH_BITS))
- lzx_flush_bits(os, 14);
+ lzx_flush_bits(os, LZX_MAX_NUM_EXTRA_BITS -
+ LZX_NUM_ALIGNED_OFFSET_BITS);
lzx_add_bits(os, codes->codewords.aligned[adjusted_offset &
LZX_ALIGNED_OFFSET_BITMASK],
if (!CAN_BUFFER(MAX_MATCH_BITS))
lzx_flush_bits(os, ALIGNED_CODEWORD_LIMIT);
} else {
- STATIC_ASSERT(CAN_BUFFER(17));
+ STATIC_ASSERT(CAN_BUFFER(LZX_MAX_NUM_EXTRA_BITS));
lzx_add_bits(os, extra_bits, num_extra_bits);
if (!CAN_BUFFER(MAX_MATCH_BITS))
- lzx_flush_bits(os, 17);
+ lzx_flush_bits(os, LZX_MAX_NUM_EXTRA_BITS);
}
if (CAN_BUFFER(MAX_MATCH_BITS))
/* Record a match. */
/* Tally the aligned offset symbol if needed. */
- if (adjusted_offset >= 16)
+ if (adjusted_offset >= LZX_MIN_ALIGNED_OFFSET + LZX_OFFSET_ADJUSTMENT)
c->freqs.aligned[adjusted_offset & LZX_ALIGNED_OFFSET_BITMASK]++;
/* Record the adjusted length. */
u32 cost;
#if CONSIDER_ALIGNED_COSTS
- if (offset >= 16 - LZX_OFFSET_ADJUSTMENT)
+ if (offset >= LZX_MIN_ALIGNED_OFFSET)
base_cost += c->costs.aligned[adjusted_offset &
LZX_ALIGNED_OFFSET_BITMASK];
#endif
unsigned i;
#if CONSIDER_ALIGNED_COSTS
- if (offset_slot >= 8)
+ if (offset_slot >= LZX_MIN_ALIGNED_OFFSET_SLOT)
extra_cost -= LZX_NUM_ALIGNED_OFFSET_BITS * BIT_COST;
#endif