4 * Functions used for decompression.
8 * Copyright (C) 2012, 2013 Eric Biggers
10 * This file is part of wimlib, a library for working with WIM files.
12 * wimlib is free software; you can redistribute it and/or modify it under the
13 * terms of the GNU General Public License as published by the Free
14 * Software Foundation; either version 3 of the License, or (at your option)
17 * wimlib is distributed in the hope that it will be useful, but WITHOUT ANY
18 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
19 * A PARTICULAR PURPOSE. See the GNU General Public License for more
22 * You should have received a copy of the GNU General Public License
23 * along with wimlib; if not, see http://www.gnu.org/licenses/.
30 #include "wimlib/decompress.h"
31 #include "wimlib/util.h"
37 # define USE_SSE2_FILL
38 # include <emmintrin.h>
40 # define USE_LONG_FILL
45 * make_huffman_decode_table: - Builds a fast huffman decoding table from an
46 * array that gives the length of the codeword for each symbol in the alphabet.
47 * Originally based on code written by David Tritscher (taken from the original LZX
48 * decompression code); also heavily modified to add some optimizations used in
49 * the zlib code, as well as more comments; also added some optimizations to
50 * make filling in the decode table entries faster (may not help significantly
53 * @decode_table: The array in which to create the fast huffman decoding
54 * table. It must have a length of at least
55 * (2**table_bits) + 2 * num_syms to guarantee
56 * that there is enough space. Also must be 16-byte
57 * aligned (at least when USE_SSE2_FILL gets defined).
59 * @num_syms: Number of symbols in the alphabet, including symbols
60 * that do not appear in this particular input chunk.
62 * @table_bits: Any symbols with a code length of table_bits or less can
63 * be decoded in one lookup of the table. 2**table_bits
64 * must be greater than or equal to @num_syms if there are
65 * any Huffman codes longer than @table_bits.
67 * @lens: An array of length @num_syms, indexable by symbol, that
68 * gives the length of the Huffman codeword for that
69 * symbol. Because the Huffman tree is in canonical form,
70 * it can be reconstructed by only knowing the length of
71 * the codeword for each symbol. It is assumed, but not
72 * checked, that every length is less than
75 * @max_codeword_len: The longest codeword length allowed in the compression
78 * Returns 0 on success; returns -1 if the length values do not correspond to a
81 * The format of the Huffman decoding table is as follows. The first (1 <<
82 * table_bits) entries of the table are indexed by chunks of the input of size
83 * @table_bits. If the next Huffman codeword in the input happens to have a
84 * length of exactly @table_bits, the symbol is simply read directly from the
85 * decoding table. Alternatively, if the next Huffman codeword has length _less
86 * than_ @table_bits, the symbol is also read directly from the decode table;
87 * this is possible because every entry in the table that is indexed by an
88 * integer that has the shorter codeword as a binary prefix is filled in with
89 * the appropriate symbol. If a codeword has length n <= table_bits, it will
90 * have 2**(table_bits - n) possible suffixes, and thus that many entries in the
93 * It's a bit more complicated if the next Huffman codeword has length of more
94 * than @table_bits. The table entry indexed by the first @table_bits of that
95 * codeword cannot give the appropriate symbol directly, because that entry is
96 * guaranteed to be referenced by the Huffman codewords of multiple symbols.
97 * And while the LZX compression format does not allow codes longer than 16
98 * bits, a table of size (2 ** 16) = 65536 entries would be too slow to create.
100 * There are several different ways to make it possible to look up the symbols
101 * for codewords longer than @table_bits. One way is to make the entries for
102 * the prefixes of length @table_bits of those entries be pointers to additional
103 * decoding tables that are indexed by some number of additional bits of the
104 * codeword. The technique used here is a bit simpler, however: just store the
105 * needed subtrees of the Huffman tree in the decoding table after the lookup
106 * entries, beginning at index (2**table_bits). Real pointers are replaced by
107 * indices into the decoding table, and symbol entries are distinguished from
108 * pointers by the fact that values less than @num_syms must be symbol values.
111 make_huffman_decode_table(u16 *decode_table, unsigned num_syms,
112 unsigned table_bits, const u8 *lens,
113 unsigned max_codeword_len)
115 unsigned len_counts[max_codeword_len + 1];
116 u16 sorted_syms[num_syms];
117 unsigned offsets[max_codeword_len + 1];
118 const unsigned table_num_entries = 1 << table_bits;
120 unsigned decode_table_pos;
121 void *decode_table_ptr;
123 unsigned codeword_len;
124 unsigned stores_per_loop;
127 const unsigned entries_per_long = sizeof(unsigned long) / sizeof(decode_table[0]);
131 const unsigned entries_per_xmm = sizeof(__m128i) / sizeof(decode_table[0]);
134 wimlib_assert2((uintptr_t)decode_table % DECODE_TABLE_ALIGNMENT == 0);
136 /* accumulate lengths for codes */
137 for (unsigned i = 0; i <= max_codeword_len; i++)
140 for (unsigned sym = 0; sym < num_syms; sym++) {
141 wimlib_assert2(lens[sym] <= max_codeword_len);
142 len_counts[lens[sym]]++;
145 /* check for an over-subscribed or incomplete set of lengths */
147 for (unsigned len = 1; len <= max_codeword_len; len++) {
149 left -= len_counts[len];
150 if (unlikely(left < 0)) { /* over-subscribed */
151 DEBUG("Invalid Huffman code (over-subscribed)");
156 if (unlikely(left != 0)) /* incomplete set */{
157 if (left == 1 << max_codeword_len) {
158 /* Empty code--- okay in XPRESS and LZX */
159 memset(decode_table, 0,
160 table_num_entries * sizeof(decode_table[0]));
163 DEBUG("Invalid Huffman code (incomplete set)");
168 /* Generate offsets into symbol table for each length for sorting */
170 for (unsigned len = 1; len < max_codeword_len; len++)
171 offsets[len + 1] = offsets[len] + len_counts[len];
173 /* Sort symbols primarily by length and secondarily by symbol order.
174 * This is basically a count-sort over the codeword lengths. */
175 for (unsigned sym = 0; sym < num_syms; sym++)
177 sorted_syms[offsets[lens[sym]]++] = sym;
179 /* Fill entries for codewords short enough for a direct mapping. We can
180 * take advantage of the ordering of the codewords, since the Huffman
181 * code is canonical. It must be the case that all the codewords of
182 * some length L numerically precede all the codewords of length L + 1.
183 * Furthermore, if we have 2 symbols A and B with the same codeword
184 * length but symbol A is sorted before symbol B, then then we know that
185 * the codeword for A numerically precedes the codeword for B. */
186 decode_table_ptr = decode_table;
190 /* Fill in the Huffman decode table entries one 128-bit vector at a
191 * time. This is 8 entries per store. */
192 stores_per_loop = (1 << (table_bits - codeword_len)) / entries_per_xmm;
193 for (; stores_per_loop != 0; codeword_len++, stores_per_loop >>= 1) {
194 unsigned end_sym_idx = sym_idx + len_counts[codeword_len];
195 for (; sym_idx < end_sym_idx; sym_idx++) {
196 /* Note: unlike in the 'long' version below, the __m128i
197 * type already has __attribute__((may_alias)), so using
198 * it to access the decode table, which is an array of
199 * unsigned shorts, will not violate strict aliasing. */
205 sym = sorted_syms[sym_idx];
207 v = _mm_set1_epi16(sym);
208 p = (__m128i*)decode_table_ptr;
213 decode_table_ptr = p;
216 #endif /* USE_SSE2_FILL */
219 /* Fill in the Huffman decode table entries one 'unsigned long' at a
220 * time. On 32-bit systems this is 2 entries per store, while on 64-bit
221 * systems this is 4 entries per store. */
222 stores_per_loop = (1 << (table_bits - codeword_len)) / entries_per_long;
223 for (; stores_per_loop != 0; codeword_len++, stores_per_loop >>= 1) {
224 unsigned end_sym_idx = sym_idx + len_counts[codeword_len];
225 for (; sym_idx < end_sym_idx; sym_idx++) {
227 /* Accessing the array of unsigned shorts as unsigned
228 * longs would violate strict aliasing and would require
229 * compiling the code with -fno-strict-aliasing to
230 * guarantee correctness. To work around this problem,
231 * use the gcc 'may_alias' extension to define a special
232 * unsigned long type that may alias any other in-memory
234 typedef unsigned long __attribute__((may_alias)) aliased_long_t;
241 sym = sorted_syms[sym_idx];
243 BUILD_BUG_ON(sizeof(aliased_long_t) != 4 &&
244 sizeof(aliased_long_t) != 8);
247 if (sizeof(aliased_long_t) >= 4)
249 if (sizeof(aliased_long_t) >= 8) {
250 /* This may produce a compiler warning if an
251 * aliased_long_t is 32 bits, but this won't be
252 * executed unless an aliased_long_t is at least
257 p = (aliased_long_t *)decode_table_ptr;
263 decode_table_ptr = p;
266 #endif /* USE_LONG_FILL */
268 /* Fill in the Huffman decode table entries one 16-bit integer at a
270 stores_per_loop = (1 << (table_bits - codeword_len));
271 for (; stores_per_loop != 0; codeword_len++, stores_per_loop >>= 1) {
272 unsigned end_sym_idx = sym_idx + len_counts[codeword_len];
273 for (; sym_idx < end_sym_idx; sym_idx++) {
278 sym = sorted_syms[sym_idx];
280 p = (u16*)decode_table_ptr;
287 decode_table_ptr = p;
291 /* If we've filled in the entire table, we are done. Otherwise, there
292 * are codes longer than table bits that we need to store in the
293 * tree-like structure at the end of the table rather than directly in
294 * the main decode table itself. */
296 decode_table_pos = (u16*)decode_table_ptr - decode_table;
297 if (decode_table_pos != table_num_entries) {
299 unsigned next_free_tree_slot;
300 unsigned cur_codeword;
302 wimlib_assert2(decode_table_pos < table_num_entries);
304 /* Fill in the remaining entries, which correspond to codes
305 * longer than @table_bits.
307 * First, zero out the rest of the entries. This is necessary
308 * so that the entries appear as "unallocated" in the next part.
310 j = decode_table_pos;
313 } while (++j != table_num_entries);
315 /* Assert that 2**table_bits is at least num_syms. If this
316 * wasn't the case, we wouldn't be able to distinguish pointer
317 * entries from symbol entries. */
318 wimlib_assert2(table_num_entries >= num_syms);
321 /* The tree nodes are allocated starting at decode_table[1 <<
322 * table_bits]. Remember that the full size of the table,
323 * including the extra space for the tree nodes, is actually
324 * 2**table_bits + 2 * num_syms slots, while table_num_entries
325 * is only 2**table_bits. */
326 next_free_tree_slot = table_num_entries;
328 /* The current Huffman codeword */
329 cur_codeword = decode_table_pos << 1;
331 /* Go through every codeword of length greater than @table_bits,
332 * primarily in order of codeword length and secondarily in
333 * order of symbol. */
334 wimlib_assert2(codeword_len == table_bits + 1);
335 for (; codeword_len <= max_codeword_len; codeword_len++, cur_codeword <<= 1)
337 unsigned end_sym_idx = sym_idx + len_counts[codeword_len];
338 for (; sym_idx < end_sym_idx; sym_idx++, cur_codeword++) {
339 unsigned sym = sorted_syms[sym_idx];
340 unsigned extra_bits = codeword_len - table_bits;
342 /* index of the current node; find it from the
343 * prefix of the current Huffman codeword. */
344 unsigned node_idx = cur_codeword >> extra_bits;
345 wimlib_assert2(node_idx < table_num_entries);
347 /* Go through each bit of the current Huffman
348 * codeword beyond the prefix of length
349 * @table_bits and walk the tree, allocating any
350 * slots that have not yet been allocated. */
353 /* If the current tree node points to
354 * nowhere but we need to follow it,
355 * allocate a new node for it to point
357 if (decode_table[node_idx] == 0) {
358 decode_table[node_idx] = next_free_tree_slot;
359 decode_table[next_free_tree_slot++] = 0;
360 decode_table[next_free_tree_slot++] = 0;
361 wimlib_assert2(next_free_tree_slot <=
362 table_num_entries + 2 * num_syms);
365 /* Set node_idx to left child */
366 node_idx = decode_table[node_idx];
368 /* Is the next bit 0 or 1? If 0, go left
369 * (already done). If 1, go right by
370 * incrementing node_idx. */
372 node_idx += (cur_codeword >> extra_bits) & 1;
373 } while (extra_bits != 0);
375 /* node_idx is now the index of the leaf entry
376 * into which the actual symbol will go. */
377 decode_table[node_idx] = sym;
379 /* Note: cur_codeword is always incremented at
380 * the end of this loop because this is how
381 * canonical Huffman codes are generated (add 1
382 * for each code, then left shift whenever the
383 * code length increases) */
390 /* Reads a Huffman-encoded symbol from the bitstream when the number of remaining
391 * bits is less than the maximum codeword length. */
393 read_huffsym_near_end_of_input(struct input_bitstream *istream,
394 const u16 decode_table[],
/* Snapshot of how many valid bits remain buffered in the bitstream. */
400 unsigned bitsleft = istream->bitsleft;
/* Fewer buffered bits than a full table index: peek what we have and
 * left-align it to form a table_bits-wide key.  NOTE(review): 'key_size'
 * is assigned in lines outside this excerpt — presumably to 'bitsleft'
 * in this branch; confirm against the full source. */
405 if (table_bits > bitsleft) {
408 key_bits = bitstream_peek_bits(istream, key_size) <<
409 (table_bits - key_size);
/* Enough bits remain: take a full table_bits-wide key directly. */
411 key_size = table_bits;
412 bitsleft -= table_bits;
413 key_bits = bitstream_peek_bits(istream, table_bits);
/* Direct table lookup; entries >= num_syms are internal tree pointers
 * rather than symbols, meaning the codeword is longer than table_bits. */
416 sym = decode_table[key_bits];
417 if (sym >= num_syms) {
418 bitstream_remove_bits(istream, key_size);
421 DEBUG("Input stream exhausted");
/* Walk the binary tree stored past the direct-lookup entries: the
 * current entry is the left-child index; add the next input bit to
 * choose left (0) or right (1). */
424 key_bits = sym + bitstream_peek_bits(istream, 1);
425 bitstream_remove_bits(istream, 1);
427 } while ((sym = decode_table[key_bits]) >= num_syms);
/* Consume exactly the decoded symbol's codeword length from the stream. */
429 bitstream_remove_bits(istream, lens[sym]);