Remove LZX_ASSERT() (now almost unused)
[wimlib] / include / wimlib / lzx_common.h
1 /*
2  * lzx_common.h
3  *
4  * Declarations shared between LZX compression and decompression.
5  */
6
7 #ifndef _LZX_COMMON_H
8 #define _LZX_COMMON_H
9
10 #include "wimlib/bitops.h"
11 #include "wimlib/lzx_constants.h"
12 #include "wimlib/types.h"
13
14 extern const u32 lzx_offset_slot_base[LZX_MAX_OFFSET_SLOTS + 1];
15
16 extern const u8 lzx_extra_offset_bits[LZX_MAX_OFFSET_SLOTS];
17
/*
 * Return the offset slot for the specified match offset.
 *
 * This returns the largest i such that:
 *
 *      offset + LZX_OFFSET_ADJUSTMENT >= lzx_offset_slot_base[i]
 *
 * However, the actual implementation below takes advantage of the regularity of
 * the offset slot bases to calculate the slot directly from the adjusted offset
 * without actually looking at the array.
 */
29 static inline unsigned
30 lzx_get_offset_slot(u32 offset)
31 {
32         u32 adjusted_offset = offset + LZX_OFFSET_ADJUSTMENT;
33         if (adjusted_offset >= 196608) {
34                 return (adjusted_offset >> 17) + 34;
35         } else {
36                 unsigned mssb_idx = fls32(adjusted_offset);
37                 return (mssb_idx << 1) |
38                         ((adjusted_offset >> (mssb_idx - 1)) & 1);
39         }
40 }
41
/*
 * Map a literal byte value to its symbol in the LZX main alphabet.
 * Literals occupy the first LZX_NUM_CHARS main symbols, so the mapping
 * is simply the identity.
 */
static inline unsigned
lzx_main_symbol_for_literal(unsigned literal)
{
	return literal;
}
47
48 static inline unsigned
49 lzx_main_symbol_for_match(unsigned offset_slot, unsigned len_header)
50 {
51         return LZX_NUM_CHARS + (offset_slot * LZX_NUM_LEN_HEADERS) + len_header;
52 }
53
54 extern unsigned
55 lzx_get_window_order(size_t max_bufsize);
56
57 extern unsigned
58 lzx_get_num_offset_slots(unsigned window_order);
59
60 extern unsigned
61 lzx_get_num_main_syms(unsigned window_order);
62
63 extern void
64 lzx_do_e8_preprocessing(u8 *data, u32 size);
65
66 extern void
67 lzx_undo_e8_preprocessing(u8 *data, u32 size);
68
69 #endif /* _LZX_COMMON_H */