X-Git-Url: https://wimlib.net/git/?p=wimlib;a=blobdiff_plain;f=src%2Flzx_compress.c;h=520c07e33572081b7756a9c3622733522569817a;hp=c682e7e9a1f26562f9c7fe63249f08d8f7217112;hb=ab64cfca6f15354ccef5a86441cd9c35898a220a;hpb=558155a9524277661e15a030648c62b32be1cdb6

diff --git a/src/lzx_compress.c b/src/lzx_compress.c
index c682e7e9..520c07e3 100644
--- a/src/lzx_compress.c
+++ b/src/lzx_compress.c
@@ -232,9 +232,10 @@ struct lzx_sequence {
 	u16 adjusted_length;
 
 	/* If bit 31 is clear, then this field contains the match header in bits
-	 * 0-8 and the match offset minus LZX_OFFSET_ADJUSTMENT in bits 9-30.
-	 * Otherwise, this sequence's literal run was the last literal run in
-	 * the block, so there is no match that follows it.  */
+	 * 0-8, and either the match offset plus LZX_OFFSET_ADJUSTMENT or a
+	 * recent offset code in bits 9-30.  Otherwise (if bit 31 is set), this
+	 * sequence's literal run was the last literal run in the block, so
+	 * there is no match that follows it.  */
 	u32 adjusted_offset_and_match_hdr;
 };
 
@@ -331,15 +332,6 @@ lzx_lru_queue_push(struct lzx_lru_queue queue, u32 offset)
 	};
 }
 
-/* Pop a match offset off the front (most recently used) end of the queue.  */
-static inline u32
-lzx_lru_queue_pop(struct lzx_lru_queue *queue_p)
-{
-	u32 offset = queue_p->R & LZX_QUEUE64_OFFSET_MASK;
-	queue_p->R >>= LZX_QUEUE64_OFFSET_SHIFT;
-	return offset;
-}
-
 /* Swap a match offset to the front of the queue.  */
 static inline struct lzx_lru_queue
 lzx_lru_queue_swap(struct lzx_lru_queue queue, unsigned idx)
@@ -911,23 +903,30 @@ lzx_write_sequences(struct lzx_output_bitstream *os, int block_type,
 					unsigned lit1 = block_data[1];
 					unsigned lit2 = block_data[2];
 					unsigned lit3 = block_data[3];
-					lzx_add_bits(os, codes->codewords.main[lit0], codes->lens.main[lit0]);
-					lzx_add_bits(os, codes->codewords.main[lit1], codes->lens.main[lit1]);
-					lzx_add_bits(os, codes->codewords.main[lit2], codes->lens.main[lit2]);
-					lzx_add_bits(os, codes->codewords.main[lit3], codes->lens.main[lit3]);
+					lzx_add_bits(os, codes->codewords.main[lit0],
+						     codes->lens.main[lit0]);
+					lzx_add_bits(os, codes->codewords.main[lit1],
+						     codes->lens.main[lit1]);
+					lzx_add_bits(os, codes->codewords.main[lit2],
+						     codes->lens.main[lit2]);
+					lzx_add_bits(os, codes->codewords.main[lit3],
+						     codes->lens.main[lit3]);
 					lzx_flush_bits(os, 4 * MAIN_CODEWORD_LIMIT);
 					block_data += 4;
 					litrunlen -= 4;
 				}
 				if (litrunlen--) {
 					unsigned lit = *block_data++;
-					lzx_add_bits(os, codes->codewords.main[lit], codes->lens.main[lit]);
+					lzx_add_bits(os, codes->codewords.main[lit],
+						     codes->lens.main[lit]);
 					if (litrunlen--) {
 						unsigned lit = *block_data++;
-						lzx_add_bits(os, codes->codewords.main[lit], codes->lens.main[lit]);
+						lzx_add_bits(os, codes->codewords.main[lit],
+							     codes->lens.main[lit]);
 						if (litrunlen--) {
 							unsigned lit = *block_data++;
-							lzx_add_bits(os, codes->codewords.main[lit], codes->lens.main[lit]);
+							lzx_add_bits(os, codes->codewords.main[lit],
+								     codes->lens.main[lit]);
 							lzx_flush_bits(os, 3 * MAIN_CODEWORD_LIMIT);
 						} else {
 							lzx_flush_bits(os, 2 * MAIN_CODEWORD_LIMIT);
@@ -940,7 +939,8 @@ lzx_write_sequences(struct lzx_output_bitstream *os, int block_type,
 				/* 32-bit: write 1 literal at a time.  */
 				do {
 					unsigned lit = *block_data++;
-					lzx_add_bits(os, codes->codewords.main[lit], codes->lens.main[lit]);
+					lzx_add_bits(os, codes->codewords.main[lit],
+						     codes->lens.main[lit]);
 					lzx_flush_bits(os, MAIN_CODEWORD_LIMIT);
 				} while (--litrunlen);
 			}
@@ -980,8 +980,10 @@ lzx_write_sequences(struct lzx_output_bitstream *os, int block_type,
 		/* If needed, output the length symbol for the match.  */
 
 		if (adjusted_length >= LZX_NUM_PRIMARY_LENS) {
-			lzx_add_bits(os, codes->codewords.len[adjusted_length - LZX_NUM_PRIMARY_LENS],
-				     codes->lens.len[adjusted_length - LZX_NUM_PRIMARY_LENS]);
+			lzx_add_bits(os, codes->codewords.len[adjusted_length -
+							      LZX_NUM_PRIMARY_LENS],
+				     codes->lens.len[adjusted_length -
+						     LZX_NUM_PRIMARY_LENS]);
 			if (!CAN_BUFFER(MAX_MATCH_BITS))
 				lzx_flush_bits(os, LENGTH_CODEWORD_LIMIT);
 		}
@@ -999,8 +1001,10 @@ lzx_write_sequences(struct lzx_output_bitstream *os, int block_type,
 
 			if (!CAN_BUFFER(MAX_MATCH_BITS))
 				lzx_flush_bits(os, 14);
-			lzx_add_bits(os, codes->codewords.aligned[adjusted_offset & LZX_ALIGNED_OFFSET_BITMASK],
-				     codes->lens.aligned[adjusted_offset & LZX_ALIGNED_OFFSET_BITMASK]);
+			lzx_add_bits(os, codes->codewords.aligned[adjusted_offset &
+								  LZX_ALIGNED_OFFSET_BITMASK],
+				     codes->lens.aligned[adjusted_offset &
+							 LZX_ALIGNED_OFFSET_BITMASK]);
 			if (!CAN_BUFFER(MAX_MATCH_BITS))
 				lzx_flush_bits(os, ALIGNED_CODEWORD_LIMIT);
 		} else {
@@ -1637,13 +1641,15 @@ lzx_find_min_cost_path(struct lzx_compressor * const restrict c,
 static void
 lzx_compute_match_costs(struct lzx_compressor *c)
 {
-	unsigned num_offset_slots = (c->num_main_syms - LZX_NUM_CHARS) / LZX_NUM_LEN_HEADERS;
+	unsigned num_offset_slots = (c->num_main_syms - LZX_NUM_CHARS) /
+					LZX_NUM_LEN_HEADERS;
 	struct lzx_costs *costs = &c->costs;
 
 	for (unsigned offset_slot = 0; offset_slot < num_offset_slots; offset_slot++) {
 
 		u32 extra_cost = (u32)lzx_extra_offset_bits[offset_slot] * LZX_BIT_COST;
-		unsigned main_symbol = LZX_NUM_CHARS + (offset_slot * LZX_NUM_LEN_HEADERS);
+		unsigned main_symbol = LZX_NUM_CHARS + (offset_slot *
+							LZX_NUM_LEN_HEADERS);
 		unsigned i;
 
 	#if LZX_CONSIDER_ALIGNED_COSTS
@@ -1821,7 +1827,9 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
 			if (unlikely(max_len > in_end - in_next)) {
 				max_len = in_end - in_next;
 				nice_len = min(max_len, nice_len);
-				if (unlikely(max_len < BT_MATCHFINDER_REQUIRED_NBYTES)) {
+				if (unlikely(max_len <
+					     BT_MATCHFINDER_REQUIRED_NBYTES))
+				{
 					in_next++;
 					cache_ptr->length = 0;
 					cache_ptr++;
@@ -1830,7 +1838,8 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
 			}
 
 			/* Check for matches.  */
-			lz_matchptr = CALL_BT_MF(is_16_bit, c, bt_matchfinder_get_matches,
+			lz_matchptr = CALL_BT_MF(is_16_bit, c,
+						 bt_matchfinder_get_matches,
 						 in_begin,
 						 in_next - in_begin,
 						 max_len,
@@ -1861,14 +1870,17 @@ lzx_compress_near_optimal(struct lzx_compressor *c,
 				if (unlikely(max_len > in_end - in_next)) {
 					max_len = in_end - in_next;
 					nice_len = min(max_len, nice_len);
-					if (unlikely(max_len < BT_MATCHFINDER_REQUIRED_NBYTES)) {
+					if (unlikely(max_len <
+						     BT_MATCHFINDER_REQUIRED_NBYTES))
+					{
 						in_next++;
 						cache_ptr->length = 0;
 						cache_ptr++;
 						continue;
 					}
 				}
-				CALL_BT_MF(is_16_bit, c, bt_matchfinder_skip_position,
+				CALL_BT_MF(is_16_bit, c,
+					   bt_matchfinder_skip_position,
 					   in_begin,
 					   in_next - in_begin,
 					   max_len,
@@ -2027,7 +2039,8 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os,
 
 			/* Find the longest match at the current position.  */
 
-			cur_len = CALL_HC_MF(is_16_bit, c, hc_matchfinder_longest_match,
+			cur_len = CALL_HC_MF(is_16_bit, c,
+					     hc_matchfinder_longest_match,
 					     in_begin,
 					     in_next - in_begin,
 					     2,
@@ -2093,7 +2106,8 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os,
 				nice_len = min(max_len, nice_len);
 			}
 
-			next_len = CALL_HC_MF(is_16_bit, c, hc_matchfinder_longest_match,
+			next_len = CALL_HC_MF(is_16_bit, c,
+					      hc_matchfinder_longest_match,
 					      in_begin,
 					      in_next - in_begin,
 					      cur_len - 2,
@@ -2153,7 +2167,8 @@ lzx_compress_lazy(struct lzx_compressor *c, struct lzx_output_bitstream *os,
 			lzx_record_match(c, cur_len, cur_offset_data,
 					 recent_offsets, is_16_bit,
 					 &litrunlen, &next_seq);
-			in_next = CALL_HC_MF(is_16_bit, c, hc_matchfinder_skip_positions,
+			in_next = CALL_HC_MF(is_16_bit, c,
+					     hc_matchfinder_skip_positions,
 					     in_begin,
 					     in_next - in_begin,
 					     in_end - in_begin,
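The updated comment in the first hunk describes how struct lzx_sequence packs three pieces of information into the single u32 field adjusted_offset_and_match_hdr. The sketch below (not part of the patch) illustrates one way such a layout could be unpacked; the macro and helper names are hypothetical, derived only from the bit ranges stated in the comment (match header in bits 0-8, adjusted offset or recent offset code in bits 9-30, end-of-block flag in bit 31).

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical names; the bit positions follow the comment in the diff. */
    #define SEQ_LAST_LITRUN_FLAG  ((uint32_t)1 << 31)  /* bit 31 set: no match follows */
    #define SEQ_MATCH_HDR_MASK    0x1FFu               /* bits 0-8: match header */
    #define SEQ_OFFSET_SHIFT      9                    /* bits 9-30: adjusted offset */

    /* True if this sequence's literal run was the last one in the block,
     * i.e. no match follows it. */
    static inline bool
    seq_is_last_literal_run(uint32_t v)
    {
        return (v & SEQ_LAST_LITRUN_FLAG) != 0;
    }

    /* Match header stored in the low 9 bits (meaningful only when bit 31
     * is clear). */
    static inline unsigned
    seq_match_header(uint32_t v)
    {
        return v & SEQ_MATCH_HDR_MASK;
    }

    /* Either the match offset plus LZX_OFFSET_ADJUSTMENT or a recent offset
     * code, stored in bits 9-30 (meaningful only when bit 31 is clear). */
    static inline uint32_t
    seq_adjusted_offset(uint32_t v)
    {
        return (v & ~SEQ_LAST_LITRUN_FLAG) >> SEQ_OFFSET_SHIFT;
    }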