6 * Copyright (C) 2013, 2014 Eric Biggers
8 * This file is free software; you can redistribute it and/or modify it under
9 * the terms of the GNU Lesser General Public License as published by the Free
10 * Software Foundation; either version 3 of the License, or (at your option) any
13 * This file is distributed in the hope that it will be useful, but WITHOUT
14 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
15 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
18 * You should have received a copy of the GNU Lesser General Public License
19 * along with this file; if not, see http://www.gnu.org/licenses/.
23 * This is a decompressor for the LZMS compression format used by Microsoft.
24 * This format is not documented, but it is one of the formats supported by the
25 * compression API available in Windows 8, and as of Windows 8 it is one of the
26 * formats that can be used in WIM files.
28 * This decompressor only implements "raw" decompression, which decompresses a
29 * single LZMS-compressed block. This behavior is the same as that of
30 * Decompress() in the Windows 8 compression API when using a compression handle
31 * created with CreateDecompressor() with the Algorithm parameter specified as
32 * COMPRESS_ALGORITHM_LZMS | COMPRESS_RAW. Presumably, non-raw LZMS data
33 * is a container format from which the locations and sizes (both compressed and
34 * uncompressed) of the constituent blocks can be determined.
36 * An LZMS-compressed block must be read in 16-bit little endian units from both
37 * directions. One logical bitstream starts at the front of the block and
38 * proceeds forwards. Another logical bitstream starts at the end of the block
39 * and proceeds backwards. Bits read from the forwards bitstream constitute
40 * binary range-encoded data, whereas bits read from the backwards bitstream
41 * constitute Huffman-encoded symbols or verbatim bits. For both bitstreams,
42 * the ordering of the bits within the 16-bit coding units is such that the
43 * first bit is the high-order bit and the last bit is the low-order bit.
45 * From these two logical bitstreams, an LZMS decompressor can reconstitute the
46 * series of items that make up the LZMS data representation. Each such item
47 * may be a literal byte or a match. Matches may be either traditional LZ77
48 * matches or "delta" matches, either of which can have its offset encoded
49 * explicitly or encoded via a reference to a recently used (repeat) offset.
51 * A traditional LZ match consists of a length and offset; it asserts that the
52 * sequence of bytes beginning at the current position and extending for the
53 * length is exactly equal to the equal-length sequence of bytes at the offset
54 * back in the data buffer. On the other hand, a delta match consists of a
55 * length, raw offset, and power. It asserts that the sequence of bytes
56 * beginning at the current position and extending for the length is equal to
57 * the bytewise sum of the two equal-length sequences of bytes (2**power) and
58 * (raw_offset * 2**power) bytes before the current position, minus bytewise the
59 * sequence of bytes beginning at (2**power + raw_offset * 2**power) bytes
60 * before the current position. Although not generally as useful as traditional
61 * LZ matches, delta matches can be helpful on some types of data. Both LZ and
62 * delta matches may overlap with the current position; in fact, the minimum
63 * offset is 1, regardless of match length.
65 * For LZ matches, up to 3 repeat offsets are allowed, similar to some other
66 * LZ-based formats such as LZX and LZMA. They must be updated in an LRU fashion,
67 * except for a quirk: inserting anything to the front of the queue must be
68 * delayed by one LZMS item. The reason for this is presumably that there is
69 * almost no reason to code the same match offset twice in a row, since you
70 * might as well have coded a longer match at that offset. For this same
71 * reason, it also is a requirement that when an offset in the queue is used,
72 * that offset is removed from the queue immediately (and made pending for
73 * front-insertion after the following decoded item), and everything to the
74 * right is shifted left one queue slot. This creates a need for an "overflow"
75 * fourth entry in the queue, even though it is only possible to decode
76 * references to the first 3 entries at any given time. The queue must be
77 * initialized to the offsets {1, 2, 3, 4}.
79 * Repeat delta matches are handled similarly, but for them there are two queues
80 * updated in lock-step: one for powers and one for raw offsets. The power
81 * queue must be initialized to {0, 0, 0, 0}, and the raw offset queue must be
82 * initialized to {1, 2, 3, 4}.
84 * Bits from the binary range decoder must be used to disambiguate item types.
85 * The range decoder must hold two state variables: the range, which must
86 * initially be set to 0xffffffff, and the current code, which must initially be
87 * set to the first 32 bits read from the forwards bitstream. The range must be
88 * maintained above 0xffff; when it falls below 0xffff, both the range and code
89 * must be left-shifted by 16 bits and the low 16 bits of the code must be
90 * filled in with the next 16 bits from the forwards bitstream.
92 * To decode each bit, the binary range decoder requires a probability that is
93 * logically a real number between 0 and 1. Multiplying this probability by the
94 * current range and taking the floor gives the bound between the 0-bit region of
95 * the range and the 1-bit region of the range. However, in LZMS, probabilities
96 * are restricted to values of n/64 where n is an integer between 1 and 63
97 * inclusively, so the implementation may use integer operations instead.
98 * Following calculation of the bound, if the current code is in the 0-bit
99 * region, the new range becomes the current code and the decoded bit is 0;
100 * otherwise, the bound must be subtracted from both the range and the code, and
101 * the decoded bit is 1. More information about range coding can be found at
102 * https://en.wikipedia.org/wiki/Range_encoding. Furthermore, note that the
103 * LZMA format also uses range coding and has public domain code available for
106 * The probability used to range-decode each bit must be taken from a table, of
107 * which one instance must exist for each distinct context in which a
108 * range-decoded bit is needed. At each call of the range decoder, the
109 * appropriate probability must be obtained by indexing the appropriate
110 * probability table with the last 4 (in the context disambiguating literals
111 * from matches), 5 (in the context disambiguating LZ matches from delta
112 * matches), or 6 (in all other contexts) bits recently range-decoded in that
113 * context, ordered such that the most recently decoded bit is the low-order bit
116 * Furthermore, each probability entry itself is variable, as its value must be
117 * maintained as n/64 where n is the number of 0 bits in the most recently
118 * decoded 64 bits with that same entry. This allows the compressed
119 * representation to adapt to the input and use fewer bits to represent the most
120 * likely data; note that LZMA uses a similar scheme. Initially, the most
121 * recently decoded 64 bits for each probability entry are assumed to be
122 * 0x0000000055555555 (high order to low order); therefore, all probabilities
123 * are initially 48/64. During the course of decoding, each probability may be
124 * updated to as low as 0/64 (as a result of reading many consecutive 1 bits
125 * with that entry) or as high as 64/64 (as a result of reading many consecutive
126 * 0 bits with that entry); however, probabilities of 0/64 and 64/64 cannot be
127 * used as-is but rather must be adjusted to 1/64 and 63/64, respectively,
128 * before being used for range decoding.
130 * Representations of the LZMS items themselves must be read from the backwards
131 * bitstream. For this, there are 5 different Huffman codes used:
133 * - The literal code, used for decoding literal bytes. Each of the 256
134 * symbols represents a literal byte. This code must be rebuilt whenever
135 * 1024 symbols have been decoded with it.
137 * - The LZ offset code, used for decoding the offsets of standard LZ77
138 * matches. Each symbol represents an offset slot, which corresponds to a
139 * base value and some number of extra bits which must be read and added to
140 * the base value to reconstitute the full offset. The number of symbols in
141 * this code is the number of offset slots needed to represent all possible
142 * offsets in the uncompressed block. This code must be rebuilt whenever
143 * 1024 symbols have been decoded with it.
145 * - The length code, used for decoding length symbols. Each of the 54 symbols
146 * represents a length slot, which corresponds to a base value and some
147 * number of extra bits which must be read and added to the base value to
148 * reconstitute the full length. This code must be rebuilt whenever 512
149 * symbols have been decoded with it.
151 * - The delta offset code, used for decoding the offsets of delta matches.
152 * Each symbol corresponds to an offset slot, which corresponds to a base
153 * value and some number of extra bits which must be read and added to the
154 * base value to reconstitute the full offset. The number of symbols in this
155 * code is equal to the number of symbols in the LZ offset code. This code
156 * must be rebuilt whenever 1024 symbols have been decoded with it.
158 * - The delta power code, used for decoding the powers of delta matches. Each
159 * of the 8 symbols corresponds to a power. This code must be rebuilt
160 * whenever 512 symbols have been decoded with it.
162 * Initially, each Huffman code must be built assuming that each symbol in that
163 * code has frequency 1. Following that, each code must be rebuilt each time a
164 * certain number of symbols, as noted above, has been decoded with it. The
165 * symbol frequencies for a code must be halved after each rebuild of that code;
166 * this makes the codes adapt to the more recent data.
168 * Like other compression formats such as XPRESS, LZX, and DEFLATE, the LZMS
169 * format requires that all Huffman codes be constructed in canonical form.
170 * This form requires that same-length codewords be lexicographically ordered
171 * the same way as the corresponding symbols and that all shorter codewords
172 * lexicographically precede longer codewords. Such a code can be constructed
173 * directly from codeword lengths.
175 * Even with the canonical code restriction, the same frequencies can be used to
176 * construct multiple valid Huffman codes. Therefore, the decompressor needs to
177 * construct the right one. Specifically, the LZMS format requires that the
178 * Huffman code be constructed as if the well-known priority queue algorithm is
179 * used and frequency ties are always broken in favor of leaf nodes.
181 * Codewords in LZMS are guaranteed to not exceed 15 bits. The format otherwise
182 * places no restrictions on codeword length. Therefore, the Huffman code
183 * construction algorithm that a correct LZMS decompressor uses need not
184 * implement length-limited code construction. But if it does (e.g. by virtue
185 * of being shared among multiple compression algorithms), the details of how it
186 * does so are unimportant, provided that the maximum codeword length parameter
187 * is set to at least 15 bits.
189 * After all LZMS items have been decoded, the data must be postprocessed to
190 * translate absolute addresses encoded in x86 instructions into their original
191 * relative addresses.
193 * Details omitted above can be found in the code. Note that in the absence of
194 * an official specification there is no guarantee that this decompressor
195 * handles all possible cases.
204 #include "wimlib/compress_common.h"
205 #include "wimlib/decompressor_ops.h"
206 #include "wimlib/decompress_common.h"
207 #include "wimlib/error.h"
208 #include "wimlib/lzms.h"
209 #include "wimlib/util.h"
/* The TABLEBITS values can be changed; they only affect decoding speed.  */
#define LZMS_LITERAL_TABLEBITS		10
#define LZMS_LENGTH_TABLEBITS		10
#define LZMS_LZ_OFFSET_TABLEBITS	10
#define LZMS_DELTA_OFFSET_TABLEBITS	10
#define LZMS_DELTA_POWER_TABLEBITS	8
218 struct lzms_range_decoder {
220 /* The relevant part of the current range. Although the logical range
221 * for range decoding is a very large integer, only a small portion
222 * matters at any given time, and it can be normalized (shifted left)
223 * whenever it gets too small. */
226 /* The current position in the range encoded by the portion of the input
230 /* Pointer to the next little-endian 16-bit integer in the compressed
231 * input data (reading forwards). */
234 /* Pointer to the end of the compressed input data. */
238 typedef u64 bitbuf_t;
240 struct lzms_input_bitstream {
242 /* Holding variable for bits that have been read from the compressed
243 * data. The bit ordering is high to low. */
246 /* Number of bits currently held in @bitbuf. */
249 /* Pointer to the one past the next little-endian 16-bit integer in the
250 * compressed input data (reading backwards). */
253 /* Pointer to the beginning of the compressed input data. */
257 /* Bookkeeping information for an adaptive Huffman code */
258 struct lzms_huffman_rebuild_info {
259 unsigned num_syms_until_rebuild;
260 unsigned rebuild_freq;
269 struct lzms_decompressor {
271 /* 'last_target_usages' is in union with everything else because it is
272 * only used for postprocessing. */
276 struct lzms_range_decoder rd;
277 struct lzms_input_bitstream is;
279 u32 recent_lz_offsets[LZMS_NUM_RECENT_OFFSETS + 1];
280 u64 recent_delta_offsets[LZMS_NUM_RECENT_OFFSETS + 1];
281 u32 pending_lz_offset;
282 u64 pending_delta_offset;
283 const u8 *lz_offset_still_pending;
284 const u8 *delta_offset_still_pending;
289 struct lzms_probability_entry main_prob_entries[
290 LZMS_NUM_MAIN_STATES];
293 struct lzms_probability_entry match_prob_entries[
294 LZMS_NUM_MATCH_STATES];
297 struct lzms_probability_entry lz_match_prob_entries[
298 LZMS_NUM_LZ_MATCH_STATES];
300 u32 delta_match_state;
301 struct lzms_probability_entry delta_match_prob_entries[
302 LZMS_NUM_DELTA_MATCH_STATES];
304 u32 lz_repeat_match_states[LZMS_NUM_RECENT_OFFSETS - 1];
305 struct lzms_probability_entry lz_repeat_match_prob_entries[
306 LZMS_NUM_RECENT_OFFSETS - 1][LZMS_NUM_LZ_REPEAT_MATCH_STATES];
308 u32 delta_repeat_match_states[LZMS_NUM_RECENT_OFFSETS - 1];
309 struct lzms_probability_entry delta_repeat_match_prob_entries[
310 LZMS_NUM_RECENT_OFFSETS - 1][LZMS_NUM_DELTA_REPEAT_MATCH_STATES];
312 /* Huffman decoding */
314 u16 literal_decode_table[(1 << LZMS_LITERAL_TABLEBITS) +
315 (2 * LZMS_NUM_LITERAL_SYMS)]
316 _aligned_attribute(DECODE_TABLE_ALIGNMENT);
317 u32 literal_freqs[LZMS_NUM_LITERAL_SYMS];
318 struct lzms_huffman_rebuild_info literal_rebuild_info;
320 u16 length_decode_table[(1 << LZMS_LENGTH_TABLEBITS) +
321 (2 * LZMS_NUM_LENGTH_SYMS)]
322 _aligned_attribute(DECODE_TABLE_ALIGNMENT);
323 u32 length_freqs[LZMS_NUM_LENGTH_SYMS];
324 struct lzms_huffman_rebuild_info length_rebuild_info;
326 u16 lz_offset_decode_table[(1 << LZMS_LZ_OFFSET_TABLEBITS) +
327 ( 2 * LZMS_MAX_NUM_OFFSET_SYMS)]
328 _aligned_attribute(DECODE_TABLE_ALIGNMENT);
329 u32 lz_offset_freqs[LZMS_MAX_NUM_OFFSET_SYMS];
330 struct lzms_huffman_rebuild_info lz_offset_rebuild_info;
332 u16 delta_offset_decode_table[(1 << LZMS_DELTA_OFFSET_TABLEBITS) +
333 (2 * LZMS_MAX_NUM_OFFSET_SYMS)]
334 _aligned_attribute(DECODE_TABLE_ALIGNMENT);
335 u32 delta_offset_freqs[LZMS_MAX_NUM_OFFSET_SYMS];
336 struct lzms_huffman_rebuild_info delta_offset_rebuild_info;
338 u16 delta_power_decode_table[(1 << LZMS_DELTA_POWER_TABLEBITS) +
339 (2 * LZMS_NUM_DELTA_POWER_SYMS)]
340 _aligned_attribute(DECODE_TABLE_ALIGNMENT);
341 u32 delta_power_freqs[LZMS_NUM_DELTA_POWER_SYMS];
342 struct lzms_huffman_rebuild_info delta_power_rebuild_info;
344 u32 codewords[LZMS_MAX_NUM_SYMS];
345 u8 lens[LZMS_MAX_NUM_SYMS];
349 s32 last_target_usages[65536];
354 /* Initialize the input bitstream @is to read backwards from the compressed data
355 * buffer @in that is @count 16-bit integers long. */
357 lzms_input_bitstream_init(struct lzms_input_bitstream *is,
358 const le16 *in, size_t count)
362 is->next = in + count;
366 /* Ensure that at least @num_bits bits are in the bitbuffer variable.
367 * @num_bits cannot be more than 32. */
369 lzms_ensure_bits(struct lzms_input_bitstream *is, const unsigned num_bits)
371 if (is->bitsleft >= num_bits)
374 if (likely(is->next != is->begin))
375 is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
376 << (sizeof(is->bitbuf) * 8 - is->bitsleft - 16);
379 if (likely(is->next != is->begin))
380 is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
381 << (sizeof(is->bitbuf) * 8 - is->bitsleft - 16);
385 /* Get @num_bits bits from the bitbuffer variable. */
386 static inline bitbuf_t
387 lzms_peek_bits(struct lzms_input_bitstream *is, unsigned num_bits)
389 if (unlikely(num_bits == 0))
391 return is->bitbuf >> (sizeof(is->bitbuf) * 8 - num_bits);
394 /* Remove @num_bits bits from the bitbuffer variable. */
396 lzms_remove_bits(struct lzms_input_bitstream *is, unsigned num_bits)
398 is->bitbuf <<= num_bits;
399 is->bitsleft -= num_bits;
402 /* Remove and return @num_bits bits from the bitbuffer variable. */
403 static inline bitbuf_t
404 lzms_pop_bits(struct lzms_input_bitstream *is, unsigned num_bits)
406 bitbuf_t bits = lzms_peek_bits(is, num_bits);
407 lzms_remove_bits(is, num_bits);
411 /* Read @num_bits bits from the input bitstream. */
412 static inline bitbuf_t
413 lzms_read_bits(struct lzms_input_bitstream *is, unsigned num_bits)
415 lzms_ensure_bits(is, num_bits);
416 return lzms_pop_bits(is, num_bits);
419 /* Initialize the range decoder @rd to read forwards from the compressed data
420 * buffer @in that is @count 16-bit integers long. */
422 lzms_range_decoder_init(struct lzms_range_decoder *rd,
423 const le16 *in, size_t count)
425 rd->range = 0xffffffff;
426 rd->code = ((u32)le16_to_cpu(in[0]) << 16) | le16_to_cpu(in[1]);
428 rd->end = in + count;
431 /* Decode and return the next bit from the range decoder.
433 * @prob is the chance out of LZMS_PROBABILITY_MAX that the next bit is 0.
436 lzms_range_decoder_decode_bit(struct lzms_range_decoder *rd, u32 prob)
440 /* Ensure the range has at least 16 bits of precision. */
441 if (rd->range <= 0xffff) {
444 if (likely(rd->next != rd->end))
445 rd->code |= le16_to_cpu(*rd->next++);
448 /* Based on the probability, calculate the bound between the 0-bit
449 * region and the 1-bit region of the range. */
450 bound = (rd->range >> LZMS_PROBABILITY_BITS) * prob;
452 if (rd->code < bound) {
453 /* Current code is in the 0-bit region of the range. */
457 /* Current code is in the 1-bit region of the range. */
464 /* Decode and return the next bit from the range decoder. This wraps around
465 * lzms_range_decoder_decode_bit() to handle using and updating the appropriate
466 * state and probability entry. */
468 lzms_range_decode_bit(struct lzms_range_decoder *rd,
469 u32 *state_p, u32 num_states,
470 struct lzms_probability_entry prob_entries[])
472 struct lzms_probability_entry *prob_entry;
476 /* Load the probability entry corresponding to the current state. */
477 prob_entry = &prob_entries[*state_p];
479 /* Get the probability that the next bit is 0. */
480 prob = lzms_get_probability(prob_entry);
482 /* Decode the next bit. */
483 bit = lzms_range_decoder_decode_bit(rd, prob);
485 /* Update the state and probability entry based on the decoded bit. */
486 *state_p = ((*state_p << 1) | bit) & (num_states - 1);
487 lzms_update_probability_entry(prob_entry, bit);
489 /* Return the decoded bit. */
494 lzms_decode_main_bit(struct lzms_decompressor *d)
496 return lzms_range_decode_bit(&d->rd, &d->main_state,
497 LZMS_NUM_MAIN_STATES,
498 d->main_prob_entries);
502 lzms_decode_match_bit(struct lzms_decompressor *d)
504 return lzms_range_decode_bit(&d->rd, &d->match_state,
505 LZMS_NUM_MATCH_STATES,
506 d->match_prob_entries);
510 lzms_decode_lz_match_bit(struct lzms_decompressor *d)
512 return lzms_range_decode_bit(&d->rd, &d->lz_match_state,
513 LZMS_NUM_LZ_MATCH_STATES,
514 d->lz_match_prob_entries);
518 lzms_decode_delta_match_bit(struct lzms_decompressor *d)
520 return lzms_range_decode_bit(&d->rd, &d->delta_match_state,
521 LZMS_NUM_DELTA_MATCH_STATES,
522 d->delta_match_prob_entries);
526 lzms_decode_lz_repeat_match_bit(struct lzms_decompressor *d, int idx)
528 return lzms_range_decode_bit(&d->rd, &d->lz_repeat_match_states[idx],
529 LZMS_NUM_LZ_REPEAT_MATCH_STATES,
530 d->lz_repeat_match_prob_entries[idx]);
534 lzms_decode_delta_repeat_match_bit(struct lzms_decompressor *d, int idx)
536 return lzms_range_decode_bit(&d->rd, &d->delta_repeat_match_states[idx],
537 LZMS_NUM_DELTA_REPEAT_MATCH_STATES,
538 d->delta_repeat_match_prob_entries[idx]);
542 lzms_init_huffman_rebuild_info(struct lzms_huffman_rebuild_info *info,
543 unsigned rebuild_freq,
544 u16 *decode_table, unsigned table_bits,
545 u32 *freqs, u32 *codewords, u8 *lens,
548 info->num_syms_until_rebuild = 1;
549 info->rebuild_freq = rebuild_freq;
550 info->decode_table = decode_table;
551 info->table_bits = table_bits;
553 info->codewords = codewords;
555 info->num_syms = num_syms;
556 lzms_init_symbol_frequencies(freqs, num_syms);
/* During Huffman code construction, each entry of the working array packs a
 * symbol in the low NUM_SYMBOL_BITS bits; the high bits hold the symbol's
 * frequency (and, later, tree-node links / depths). */
#define NUM_SYMBOL_BITS 10
#define SYMBOL_MASK ((1 << NUM_SYMBOL_BITS) - 1)
563 heapify_subtree(u32 A[], unsigned length, unsigned subtree_idx)
570 parent_idx = subtree_idx;
571 while ((child_idx = parent_idx * 2) <= length) {
572 if (child_idx < length && A[child_idx + 1] > A[child_idx])
574 if (v >= A[child_idx])
576 A[parent_idx] = A[child_idx];
577 parent_idx = child_idx;
583 heapify_array(u32 A[], unsigned length)
585 for (unsigned subtree_idx = length / 2; subtree_idx >= 1; subtree_idx--)
586 heapify_subtree(A, length, subtree_idx);
590 heapsort(u32 A[], unsigned length)
594 heapify_array(A, length);
596 while (length >= 2) {
597 swap(A[1], A[length]);
599 heapify_subtree(A, length, 1);
604 lzms_sort_symbols_by_frequency(u32 * const restrict A,
605 u32 * const restrict freqs,
606 const unsigned num_syms)
608 unsigned num_counters = (DIV_ROUND_UP(num_syms, 4) + 3) & ~3;
609 unsigned counters[num_counters];
610 unsigned cumulative_count;
612 memset(counters, 0, sizeof(counters));
614 for (unsigned sym = 0; sym < num_syms; sym++)
615 counters[min(freqs[sym], num_counters - 1)]++;
617 cumulative_count = 0;
618 for (unsigned i = 0; i < num_counters; i++) {
619 unsigned count = counters[i];
620 counters[i] = cumulative_count;
621 cumulative_count += count;
624 for (unsigned sym = 0; sym < num_syms; sym++) {
625 A[counters[min(freqs[sym], num_counters - 1)]++] =
626 sym | (freqs[sym] << NUM_SYMBOL_BITS);
627 freqs[sym] = (freqs[sym] >> 1) + 1;
630 heapsort(&A[counters[num_counters - 2]],
631 counters[num_counters - 1] - counters[num_counters - 2]);
/* Build a Huffman tree in place over the frequency-sorted array 'A',
 * repeatedly combining the two lowest-frequency nodes.  Per the format
 * description above, frequency ties must be broken in favor of leaf nodes.
 * 'e' appears to be the index at which internal (combined) nodes are written;
 * combined nodes store their parent index in the high bits.
 * NOTE(review): many interior lines of this function (the declarations of
 * b/e/i/m/n/freq_shifted, the loop heads, and the leaf/internal node selection
 * logic) are missing from this copy of the file; restore them from upstream
 * before relying on this implementation. */
635 lzms_build_huffman_tree(u32 * const restrict A, const unsigned num_syms)
/* Select a node, preferring the leaf on a frequency tie ('<='). */
646 (b == e || (A[i] >> NUM_SYMBOL_BITS) <= (A[b] >> NUM_SYMBOL_BITS)))
/* Select the second node the same way. */
652 (b == e || (A[i] >> NUM_SYMBOL_BITS) <= (A[b] >> NUM_SYMBOL_BITS)))
/* Combine nodes m and n into a new internal node at index e: the new
 * node's frequency is the sum; m and n record e as their parent. */
656 freq_shifted = (A[m] & ~SYMBOL_MASK) + (A[n] & ~SYMBOL_MASK);
657 A[m] = (A[m] & SYMBOL_MASK) | (e << NUM_SYMBOL_BITS);
658 A[n] = (A[n] & SYMBOL_MASK) | (e << NUM_SYMBOL_BITS);
659 A[e] = (A[e] & SYMBOL_MASK) | freq_shifted;
/* Continue until only the root remains uncombined. */
661 } while (num_syms - e > 1);
/* Walk the Huffman tree (stored in 'A' by lzms_build_huffman_tree()) from the
 * root downward, computing each node's depth from its parent's depth, and
 * tally in 'len_counts' how many codewords of each length the canonical code
 * will need.  Each internal node at depth d contributes two children whose
 * codewords have length d + 1, hence the '+= 2'.
 * NOTE(review): several interior lines are missing from this copy of the file
 * — notably the zeroing of len_counts[len] in the first loop and what appears
 * to be depth-clamping logic around lines 683/685 (presumably limiting 'len'
 * to LZMS_MAX_CODEWORD_LEN).  Restore them from upstream before relying on
 * this implementation. */
665 lzms_generate_length_counts(u32 * const restrict A,
666 unsigned * const restrict len_counts,
667 const unsigned num_syms)
/* Clear the per-length counters (loop body missing in this copy). */
670 for (unsigned len = 0; len <= LZMS_MAX_CODEWORD_LEN; len++)
/* The root (the last combined node, at index num_syms - 2) has depth 0. */
675 A[num_syms - 2] &= SYMBOL_MASK;
/* Visit the remaining nodes in reverse creation order, so each node's
 * parent has already had its depth computed. */
677 for (int node = (int)num_syms - 3; node >= 0; node--) {
679 unsigned parent = A[node] >> NUM_SYMBOL_BITS;
680 unsigned parent_depth = A[parent] >> NUM_SYMBOL_BITS;
681 unsigned depth = parent_depth + 1;
682 unsigned len = depth;
/* Record this node's depth in place of its parent link. */
684 A[node] = (A[node] & SYMBOL_MASK) | (depth << NUM_SYMBOL_BITS);
/* Each internal node at depth 'len' yields two codewords of length
 * len + 1. */
686 len_counts[len + 1] += 2;
691 lzms_generate_lengths(u32 * const restrict A,
692 unsigned * const restrict len_counts,
693 u8 * const restrict lens)
696 for (unsigned len = LZMS_MAX_CODEWORD_LEN; len >= 1; len--) {
697 unsigned count = len_counts[len];
699 lens[A[i++] & SYMBOL_MASK] = len;
704 lzms_sort_symbols_by_codeword_length(u32 * const restrict A,
705 u8 * const restrict lens,
706 unsigned * const restrict len_counts,
707 const unsigned num_syms)
709 unsigned offsets[LZMS_MAX_CODEWORD_LEN + 1];
712 for (unsigned len = 1; len < LZMS_MAX_CODEWORD_LEN; len++)
713 offsets[len + 1] = offsets[len] + len_counts[len];
715 for (unsigned sym = 0; sym < num_syms; sym++)
716 A[offsets[lens[sym]]++] = sym;
/* Construct a direct mapping entry in the lookup table: the symbol occupies
 * the low 11 bits and the codeword length the bits above them.  (Entries
 * >= 0xC000 are reserved for binary-tree links; see lzms_fill_decode_table()
 * and lzms_decode_huffman_symbol().) */
#define MAKE_DIRECT_ENTRY(symbol, length) ((symbol) | ((length) << 11))
/* Fill in the Huffman decode table: direct entries (symbol + length packed by
 * MAKE_DIRECT_ENTRY) for codewords no longer than table_bits, replicated to
 * fill each codeword's share of the table, plus binary-tree entries (flagged
 * with 0xC000, link in the low 14 bits) for longer codewords.
 * NOTE(review): the interior store loops (around original lines 741-748,
 * 759-761, and 771-776) and some closing braces are missing from this copy of
 * the file; restore them from upstream before relying on this implementation. */
723 lzms_fill_decode_table(u16 * const restrict decode_table,
724 const unsigned table_bits,
725 const unsigned num_syms,
726 const unsigned * const restrict len_counts,
727 const u32 * const restrict sorted_syms)
/* Cursor into the table; advances as each codeword's entries are stored. */
729 u16 *decode_table_ptr = decode_table;
730 const unsigned table_num_entries = 1U << table_bits;
731 unsigned sym_idx = 0;
732 unsigned codeword_len = 1;
733 unsigned stores_per_loop;
734 unsigned decode_table_pos;
/* Phase 1: codewords short enough that each one fills at least four
 * 64-bit groups of entries; store them via u64 writes (loop body for the
 * actual stores is missing in this copy). */
736 stores_per_loop = (1 << (table_bits - codeword_len)) / 4;
737 for (; stores_per_loop != 0; codeword_len++, stores_per_loop >>= 1) {
738 unsigned end_sym_idx = sym_idx + len_counts[codeword_len];
739 for (; sym_idx < end_sym_idx; sym_idx++) {
740 u16 entry = MAKE_DIRECT_ENTRY(sorted_syms[sym_idx], codeword_len);
744 u64 *p = (u64*)decode_table_ptr;
745 unsigned n = stores_per_loop;
749 decode_table_ptr = (u16*)p;
/* Phase 2: remaining codewords of length <= table_bits; store their
 * replicated entries one u16 at a time (store loop missing in this copy). */
752 stores_per_loop = (1 << (table_bits - codeword_len));
753 for (; stores_per_loop != 0; codeword_len++, stores_per_loop >>= 1) {
754 unsigned end_sym_idx = sym_idx + len_counts[codeword_len];
755 for (; sym_idx < end_sym_idx; sym_idx++) {
756 u16 entry = MAKE_DIRECT_ENTRY(sorted_syms[sym_idx], codeword_len);
757 u16 *p = decode_table_ptr;
758 unsigned n = stores_per_loop;
762 decode_table_ptr = p;
/* Phase 3: if the direct entries did not fill the table, the remaining
 * codewords are longer than table_bits and are decoded via binary trees
 * appended after the first table_num_entries entries. */
766 decode_table_pos = (u16*)decode_table_ptr - decode_table;
767 if (decode_table_pos != table_num_entries) {
769 unsigned next_free_tree_slot;
770 unsigned cur_codeword;
/* Zero the unfilled direct entries (loop body partly missing). */
772 j = decode_table_pos;
775 } while (++j != table_num_entries);
777 next_free_tree_slot = table_num_entries;
778 for (cur_codeword = decode_table_pos << 1;
779 codeword_len <= LZMS_MAX_CODEWORD_LEN;
780 codeword_len++, cur_codeword <<= 1)
782 unsigned end_sym_idx = sym_idx + len_counts[codeword_len];
783 for (; sym_idx < end_sym_idx; sym_idx++, cur_codeword++) {
784 unsigned sym = sorted_syms[sym_idx];
785 unsigned extra_bits = codeword_len - table_bits;
/* The codeword's top table_bits bits select the tree root slot. */
786 unsigned node_idx = cur_codeword >> extra_bits;
/* Allocate a tree node on first use: flag 0xC000 plus the index
 * of a pair of child slots appended past the direct entries. */
788 if (decode_table[node_idx] == 0) {
789 decode_table[node_idx] =
790 next_free_tree_slot | 0xC000;
791 decode_table[next_free_tree_slot++] = 0;
792 decode_table[next_free_tree_slot++] = 0;
/* Follow the link and branch on the next codeword bit. */
794 node_idx = decode_table[node_idx] & 0x3FFF;
796 node_idx += (cur_codeword >> extra_bits) & 1;
797 } while (extra_bits != 0);
/* Leaf: store the symbol itself (always < 0xC000). */
798 decode_table[node_idx] = sym;
805 lzms_rebuild_huffman_code(struct lzms_huffman_rebuild_info *info)
807 u32 * const A = info->codewords;
808 unsigned len_counts[LZMS_MAX_CODEWORD_LEN + 1];
810 info->num_syms_until_rebuild = info->rebuild_freq;
812 lzms_sort_symbols_by_frequency(A, info->freqs, info->num_syms);
814 lzms_build_huffman_tree(A, info->num_syms);
816 lzms_generate_length_counts(A, len_counts, info->num_syms);
818 lzms_generate_lengths(A, len_counts, info->lens);
820 lzms_sort_symbols_by_codeword_length(A, info->lens, len_counts, info->num_syms);
822 lzms_fill_decode_table(info->decode_table, info->table_bits,
823 info->num_syms, len_counts, A);
827 lzms_rebuild_huffman_code_if_needed(struct lzms_huffman_rebuild_info *info)
829 if (unlikely(--info->num_syms_until_rebuild == 0))
830 lzms_rebuild_huffman_code(info);
833 static inline unsigned
834 lzms_decode_huffman_symbol(struct lzms_input_bitstream *is,
835 u16 decode_table[], unsigned table_bits,
836 struct lzms_huffman_rebuild_info *rebuild_info)
842 lzms_rebuild_huffman_code_if_needed(rebuild_info);
844 lzms_ensure_bits(is, LZMS_MAX_CODEWORD_LEN);
846 /* Index the decode table by the next table_bits bits of the input. */
847 key_bits = lzms_peek_bits(is, table_bits);
848 entry = decode_table[key_bits];
849 if (likely(entry < 0xC000)) {
850 /* Fast case: The decode table directly provided the symbol and
851 * codeword length. The low 11 bits are the symbol, and the
852 * high 5 bits are the codeword length. */
853 lzms_remove_bits(is, entry >> 11);
856 /* Slow case: The codeword for the symbol is longer than
857 * table_bits, so the symbol does not have an entry directly in
858 * the first (1 << table_bits) entries of the decode table.
859 * Traverse the appropriate binary tree bit-by-bit in order to
860 * decode the symbol. */
861 lzms_remove_bits(is, table_bits);
863 key_bits = (entry & 0x3FFF) + lzms_pop_bits(is, 1);
864 } while ((entry = decode_table[key_bits]) >= 0xC000);
868 /* Tally and return the decoded symbol. */
869 rebuild_info->freqs[sym]++;
874 lzms_decode_literal(struct lzms_decompressor *d)
876 return lzms_decode_huffman_symbol(&d->is,
877 d->literal_decode_table,
878 LZMS_LITERAL_TABLEBITS,
879 &d->literal_rebuild_info);
883 lzms_decode_length(struct lzms_decompressor *d)
885 unsigned slot = lzms_decode_huffman_symbol(&d->is,
886 d->length_decode_table,
887 LZMS_LENGTH_TABLEBITS,
888 &d->length_rebuild_info);
889 u32 length = lzms_length_slot_base[slot];
890 unsigned num_extra_bits = lzms_extra_length_bits[slot];
891 /* Usually most lengths are short and there are no extra bits. */
893 length += lzms_read_bits(&d->is, num_extra_bits);
898 lzms_decode_lz_offset(struct lzms_decompressor *d)
900 unsigned slot = lzms_decode_huffman_symbol(&d->is,
901 d->lz_offset_decode_table,
902 LZMS_LZ_OFFSET_TABLEBITS,
903 &d->lz_offset_rebuild_info);
904 return lzms_offset_slot_base[slot] +
905 lzms_read_bits(&d->is, lzms_extra_offset_bits[slot]);
909 lzms_decode_delta_offset(struct lzms_decompressor *d)
911 unsigned slot = lzms_decode_huffman_symbol(&d->is,
912 d->delta_offset_decode_table,
913 LZMS_DELTA_OFFSET_TABLEBITS,
914 &d->delta_offset_rebuild_info);
915 return lzms_offset_slot_base[slot] +
916 lzms_read_bits(&d->is, lzms_extra_offset_bits[slot]);
920 lzms_decode_delta_power(struct lzms_decompressor *d)
922 return lzms_decode_huffman_symbol(&d->is,
923 d->delta_power_decode_table,
924 LZMS_DELTA_POWER_TABLEBITS,
925 &d->delta_power_rebuild_info);
/*
 * NOTE(review): this extraction drops interleaved lines (the embedded line
 * numbers are non-contiguous), so braces, 'else' arms, the error-return
 * statements after the bounds checks, and the out_next advance after
 * lz_copy() are not visible.  Comments below describe only the visible code.
 */
928 /* Decode the series of literals and matches from the LZMS-compressed data.
929 * Return 0 if successful or -1 if the compressed data is invalid. */
931 lzms_decode_items(struct lzms_decompressor * const restrict d,
932 u8 * const restrict out, const size_t out_nbytes)
935 u8 * const out_end = out + out_nbytes;
/* The recent-offset queues are accessed with indices 0..3 below. */
937 BUILD_BUG_ON(4 != LZMS_NUM_RECENT_OFFSETS + 1);
/* Produce exactly out_nbytes bytes of uncompressed data. */
939 while (out_next != out_end) {
/* main bit clear => literal; set => match */
941 if (!lzms_decode_main_bit(d)) {
944 *out_next++ = lzms_decode_literal(d);
/* match bit clear => LZ match; set => delta match */
946 } else if (!lzms_decode_match_bit(d)) {
/*
 * LZ match.  The last match's offset sits in pending_lz_offset and is only
 * pushed onto the recent-offsets queue once another match is decoded at a
 * different output position (tracked by lz_offset_still_pending).
 */
953 if (d->pending_lz_offset != 0 && out_next != d->lz_offset_still_pending) {
954 BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3);
955 d->recent_lz_offsets[3] = d->recent_lz_offsets[2];
956 d->recent_lz_offsets[2] = d->recent_lz_offsets[1];
957 d->recent_lz_offsets[1] = d->recent_lz_offsets[0];
958 d->recent_lz_offsets[0] = d->pending_lz_offset;
959 d->pending_lz_offset = 0;
/* lz-match bit clear => explicit offset; set => repeat offset */
962 if (!lzms_decode_lz_match_bit(d)) {
963 /* Explicit offset */
964 offset = lzms_decode_lz_offset(d);
/* Repeat offset: select queue entry i, then shift later entries down. */
968 BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3);
969 if (!lzms_decode_lz_repeat_match_bit(d, 0)) {
970 offset = d->recent_lz_offsets[0];
971 d->recent_lz_offsets[0] = d->recent_lz_offsets[1];
972 d->recent_lz_offsets[1] = d->recent_lz_offsets[2];
973 d->recent_lz_offsets[2] = d->recent_lz_offsets[3];
974 } else if (!lzms_decode_lz_repeat_match_bit(d, 1)) {
975 offset = d->recent_lz_offsets[1];
976 d->recent_lz_offsets[1] = d->recent_lz_offsets[2];
977 d->recent_lz_offsets[2] = d->recent_lz_offsets[3];
/* (the 'else' introducing the final repeat case is on a dropped line) */
979 offset = d->recent_lz_offsets[2];
980 d->recent_lz_offsets[2] = d->recent_lz_offsets[3];
/* Flush any still-pending offset before recording this match's offset. */
984 if (d->pending_lz_offset != 0) {
985 BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3);
986 d->recent_lz_offsets[3] = d->recent_lz_offsets[2];
987 d->recent_lz_offsets[2] = d->recent_lz_offsets[1];
988 d->recent_lz_offsets[1] = d->recent_lz_offsets[0];
989 d->recent_lz_offsets[0] = d->pending_lz_offset;
991 d->pending_lz_offset = offset;
993 length = lzms_decode_length(d);
/* Validate the match: it must fit in the remaining output space and must
 * not reference data before the start of the buffer.  (The '-1' error
 * returns fall on dropped lines.) */
995 if (unlikely(length > out_end - out_next))
997 if (unlikely(offset > out_next - out))
1000 lz_copy(out_next, length, offset, out_end, LZMS_MIN_MATCH_LEN);
1003 d->lz_offset_still_pending = out_next;
/*
 * Delta match.  Parameterized by 'power' and 'raw_offset'; each output byte
 * is reconstructed from three earlier bytes at distances (1 << power),
 * (raw_offset << power), and their sum (see the *matchptr loop below).
 */
1008 u32 raw_offset, offset1, offset2, offset;
1009 const u8 *matchptr1, *matchptr2, *matchptr;
/* Same delayed queue-update scheme as for LZ offsets; queue entries pack
 * raw_offset (low 32 bits) and power (high bits) into one value, as shown
 * by the pending_delta_offset assignment further down. */
1012 if (d->pending_delta_offset != 0 && out_next != d->delta_offset_still_pending) {
1013 BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3);
1014 d->recent_delta_offsets[3] = d->recent_delta_offsets[2];
1015 d->recent_delta_offsets[2] = d->recent_delta_offsets[1];
1016 d->recent_delta_offsets[1] = d->recent_delta_offsets[0];
1017 d->recent_delta_offsets[0] = d->pending_delta_offset;
1018 d->pending_delta_offset = 0;
1021 if (!lzms_decode_delta_match_bit(d)) {
1022 /* Explicit offset */
1023 power = lzms_decode_delta_power(d);
1024 raw_offset = lzms_decode_delta_offset(d);
/* Repeat offset: select a packed (raw_offset, power) queue entry. */
1029 BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3);
1030 if (!lzms_decode_delta_repeat_match_bit(d, 0)) {
1031 val = d->recent_delta_offsets[0];
1032 d->recent_delta_offsets[0] = d->recent_delta_offsets[1];
1033 d->recent_delta_offsets[1] = d->recent_delta_offsets[2];
1034 d->recent_delta_offsets[2] = d->recent_delta_offsets[3];
1035 } else if (!lzms_decode_delta_repeat_match_bit(d, 1)) {
1036 val = d->recent_delta_offsets[1];
1037 d->recent_delta_offsets[1] = d->recent_delta_offsets[2];
1038 d->recent_delta_offsets[2] = d->recent_delta_offsets[3];
/* (final repeat case; 'power = val >> 32' presumably falls on a dropped
 * line — the pending packing below implies it; confirm against upstream) */
1040 val = d->recent_delta_offsets[2];
1041 d->recent_delta_offsets[2] = d->recent_delta_offsets[3];
1044 raw_offset = (u32)val;
/* Flush any still-pending packed offset, then record this match's. */
1047 if (d->pending_delta_offset != 0) {
1048 BUILD_BUG_ON(LZMS_NUM_RECENT_OFFSETS != 3);
1049 d->recent_delta_offsets[3] = d->recent_delta_offsets[2];
1050 d->recent_delta_offsets[2] = d->recent_delta_offsets[1];
1051 d->recent_delta_offsets[1] = d->recent_delta_offsets[0];
1052 d->recent_delta_offsets[0] = d->pending_delta_offset;
1053 d->pending_delta_offset = 0;
1055 d->pending_delta_offset = raw_offset | ((u64)power << 32);
1057 length = lzms_decode_length(d);
/* The two component distances and their sum. */
1059 offset1 = (u32)1 << power;
1060 offset2 = raw_offset << power;
1061 offset = offset1 + offset2;
1063 /* raw_offset<<power overflowed? */
1064 if (unlikely((offset2 >> power) != raw_offset))
1067 /* offset1+offset2 overflowed? */
1068 if (unlikely(offset < offset2))
/* Bounds checks, as in the LZ-match case. */
1071 if (unlikely(length > out_end - out_next))
1074 if (unlikely(offset > out_next - out))
1077 matchptr1 = out_next - offset1;
1078 matchptr2 = out_next - offset2;
1079 matchptr = out_next - offset;
/* Reconstruct one byte from the three reference bytes. */
1082 *out_next++ = *matchptr1++ + *matchptr2++ - *matchptr++;
1085 d->delta_offset_still_pending = out_next;
/*
 * Initialize (reset) the decompressor state for decoding one LZMS block:
 * the recent-offset queues, the range decoder and its probability entries
 * for every bit class, and the Huffman rebuild info for every code.
 *
 * NOTE(review): this extraction drops interleaved lines, so some closing
 * braces and a few argument lines of the init calls are not visible.
 */
1092 lzms_init_decompressor(struct lzms_decompressor *d, const void *in,
1093 size_t in_nbytes, unsigned num_offset_slots)
/* Recent-offset queues start as {1, 2, 3, 4}; nothing pending. */
1095 for (int i = 0; i < LZMS_NUM_RECENT_OFFSETS + 1; i++) {
1096 d->recent_lz_offsets[i] = i + 1;
1097 d->recent_delta_offsets[i] = i + 1;
1099 d->pending_lz_offset = 0;
1100 d->pending_delta_offset = 0;
1102 /* Range decoding */
/* The input is consumed in 16-bit units (hence / sizeof(le16)). */
1104 lzms_range_decoder_init(&d->rd, in, in_nbytes / sizeof(le16));
1107 lzms_init_probability_entries(d->main_prob_entries, LZMS_NUM_MAIN_STATES);
1110 lzms_init_probability_entries(d->match_prob_entries, LZMS_NUM_MATCH_STATES);
1112 d->lz_match_state = 0;
1113 lzms_init_probability_entries(d->lz_match_prob_entries, LZMS_NUM_LZ_MATCH_STATES);
1115 d->delta_match_state = 0;
1116 lzms_init_probability_entries(d->delta_match_prob_entries, LZMS_NUM_DELTA_MATCH_STATES);
/* One state machine per repeat-match bit position. */
1118 for (int i = 0; i < LZMS_NUM_RECENT_OFFSETS - 1; i++) {
1119 d->lz_repeat_match_states[i] = 0;
1120 lzms_init_probability_entries(d->lz_repeat_match_prob_entries[i],
1121 LZMS_NUM_LZ_REPEAT_MATCH_STATES);
1123 d->delta_repeat_match_states[i] = 0;
1124 lzms_init_probability_entries(d->delta_repeat_match_prob_entries[i],
1125 LZMS_NUM_DELTA_REPEAT_MATCH_STATES);
1128 /* Huffman decoding */
/* The backwards bitstream reads the same buffer in 16-bit units. */
1130 lzms_input_bitstream_init(&d->is, in, in_nbytes / sizeof(le16));
/* Each adaptive Huffman code gets a rebuild-frequency, a decode table,
 * its table bits, and its symbol count.  (Some argument lines — e.g. the
 * freqs arrays for the literal/length/lz-offset codes, and the symbol
 * counts using num_offset_slots — fall on dropped lines.) */
1132 lzms_init_huffman_rebuild_info(&d->literal_rebuild_info,
1133 LZMS_LITERAL_CODE_REBUILD_FREQ,
1134 d->literal_decode_table,
1135 LZMS_LITERAL_TABLEBITS,
1139 LZMS_NUM_LITERAL_SYMS);
1141 lzms_init_huffman_rebuild_info(&d->length_rebuild_info,
1142 LZMS_LENGTH_CODE_REBUILD_FREQ,
1143 d->length_decode_table,
1144 LZMS_LENGTH_TABLEBITS,
1148 LZMS_NUM_LENGTH_SYMS);
1150 lzms_init_huffman_rebuild_info(&d->lz_offset_rebuild_info,
1151 LZMS_LZ_OFFSET_CODE_REBUILD_FREQ,
1152 d->lz_offset_decode_table,
1153 LZMS_LZ_OFFSET_TABLEBITS,
1159 lzms_init_huffman_rebuild_info(&d->delta_offset_rebuild_info,
1160 LZMS_DELTA_OFFSET_CODE_REBUILD_FREQ,
1161 d->delta_offset_decode_table,
1162 LZMS_DELTA_OFFSET_TABLEBITS,
1163 d->delta_offset_freqs,
1168 lzms_init_huffman_rebuild_info(&d->delta_power_rebuild_info,
1169 LZMS_DELTA_POWER_CODE_REBUILD_FREQ,
1170 d->delta_power_decode_table,
1171 LZMS_DELTA_POWER_TABLEBITS,
1172 d->delta_power_freqs,
1175 LZMS_NUM_DELTA_POWER_SYMS);
/*
 * Allocate an LZMS decompressor (decompressor_ops.create_decompressor).
 * The structure is allocated with DECODE_TABLE_ALIGNMENT alignment.
 * Returns WIMLIB_ERR_INVALID_PARAM if max_bufsize exceeds
 * LZMS_MAX_BUFFER_SIZE, and WIMLIB_ERR_NOMEM on allocation failure.
 *
 * NOTE(review): the NULL check guarding the NOMEM return and the success
 * path (presumably '*d_ret = d; return 0;') fall on dropped lines —
 * confirm against the full file.
 */
1179 lzms_create_decompressor(size_t max_bufsize, void **d_ret)
1181 struct lzms_decompressor *d;
1183 if (max_bufsize > LZMS_MAX_BUFFER_SIZE)
1184 return WIMLIB_ERR_INVALID_PARAM;
1186 d = ALIGNED_MALLOC(sizeof(struct lzms_decompressor),
1187 DECODE_TABLE_ALIGNMENT);
1189 return WIMLIB_ERR_NOMEM;
1195 /* Decompress @in_nbytes bytes of LZMS-compressed data at @in and write the
1196 * uncompressed data, which had original size @out_nbytes, to @out. Return 0 if
1197 * successful or -1 if the compressed data is invalid. */
1199 lzms_decompress(const void *in, size_t in_nbytes, void *out, size_t out_nbytes,
1202 struct lzms_decompressor *d = _d;
1205 * Requirements on the compressed data:
1207 * 1. LZMS-compressed data is a series of 16-bit integers, so the
1208 * compressed data buffer cannot take up an odd number of bytes.
1209 * 2. To prevent poor performance on some architectures, we require that
1210 * the compressed data buffer is 2-byte aligned.
1211 * 3. There must be at least 4 bytes of compressed data, since otherwise
1212 * we cannot even initialize the range decoder.
/* Reject input violating any of the three requirements above.  (The error
 * return falls on a dropped line.) */
1214 if ((in_nbytes & 1) || ((uintptr_t)in & 1) || (in_nbytes < 4))
/* num_offset_slots depends only on the uncompressed size. */
1217 lzms_init_decompressor(d, in, in_nbytes,
1218 lzms_get_num_offset_slots(out_nbytes));
1220 if (lzms_decode_items(d, out, out_nbytes))
/* Post-process: undo the x86 machine-code translation applied at
 * compression time ('true' selects the undo direction here). */
1223 lzms_x86_filter(out, out_nbytes, d->last_target_usages, true);
/* Free a decompressor created by lzms_create_decompressor()
 * (decompressor_ops.free_decompressor).  The freeing call is on a dropped
 * line; since allocation used ALIGNED_MALLOC, presumably it is the matching
 * aligned free — confirm against the full file. */
1228 lzms_free_decompressor(void *_d)
1230 struct lzms_decompressor *d = _d;
1235 const struct decompressor_ops lzms_decompressor_ops = {
1236 .create_decompressor = lzms_create_decompressor,
1237 .decompress = lzms_decompress,
1238 .free_decompressor = lzms_free_decompressor,