+ /* Nope; output the match. */
+
+ /* The low 9 bits of adjusted_offset_and_match_hdr hold the match
+  * header; the remaining high bits hold the adjusted match offset.
+  * The offset slot is recovered below as
+  * match_hdr / LZX_NUM_LEN_HEADERS — presumably the remainder is the
+  * length header baked into the main symbol. */
+ match_hdr = seq->adjusted_offset_and_match_hdr & 0x1FF;
+ main_symbol = LZX_NUM_CHARS + match_hdr;
+ adjusted_length = seq->adjusted_length;
+
+ /* Advance past the matched bytes; the real match length is the
+  * adjusted length plus LZX_MIN_MATCH_LEN. */
+ block_data += adjusted_length + LZX_MIN_MATCH_LEN;
+
+ offset_slot = match_hdr / LZX_NUM_LEN_HEADERS;
+ adjusted_offset = seq->adjusted_offset_and_match_hdr >> 9;
+
+ /* Number of extra offset bits for this slot, and their value
+  * (the adjusted offset relative to the slot's base offset). */
+ num_extra_bits = lzx_extra_offset_bits[offset_slot];
+ extra_bits = adjusted_offset - lzx_offset_slot_base[offset_slot];
+
+ /* Output the main symbol for the match. */
+ lzx_write_varbits(os, codes->codewords.main[main_symbol],
+ codes->lens.main[main_symbol],
+ LZX_MAX_MAIN_CODEWORD_LEN);
+
+ /* If needed, output the length symbol for the match.
+  * NOTE(review): presumably lengths below LZX_NUM_PRIMARY_LENS are
+  * fully encoded by the main symbol's length header, so only longer
+  * matches need an explicit length symbol — confirm against the
+  * symbol-construction code. */
+
+ if (adjusted_length >= LZX_NUM_PRIMARY_LENS) {
+ lzx_write_varbits(os, codes->codewords.len[adjusted_length - LZX_NUM_PRIMARY_LENS],
+ codes->lens.len[adjusted_length - LZX_NUM_PRIMARY_LENS],
+ LZX_MAX_LEN_CODEWORD_LEN);
+ }
+
+ /* Output the extra offset bits for the match. In aligned
+  * offset blocks, the lowest 3 bits of the adjusted offset are
+  * Huffman-encoded using the aligned offset code, provided that
+  * at least 3 extra offset bits are required. All other
+  * extra offset bits are output verbatim.
+  *
+  * NOTE(review): ones_if_aligned looks like it is all-ones in
+  * aligned offset blocks and 0 otherwise, so the >= 16 test both
+  * disables this path for verbatim blocks and selects only offset
+  * slots with at least 3 extra bits — confirm where it is set. */
+
+ if ((adjusted_offset & ones_if_aligned) >= 16) {
+
+ /* High extra bits verbatim (3 fewer than the slot requires)... */
+ lzx_write_varbits(os, extra_bits >> LZX_NUM_ALIGNED_OFFSET_BITS,
+ num_extra_bits - LZX_NUM_ALIGNED_OFFSET_BITS,
+ 14);
+
+ /* ...then the low 3 bits via the aligned offset Huffman code. */
+ lzx_write_varbits(os, codes->codewords.aligned[adjusted_offset & LZX_ALIGNED_OFFSET_BITMASK],
+ codes->lens.aligned[adjusted_offset & LZX_ALIGNED_OFFSET_BITMASK],
+ LZX_MAX_ALIGNED_CODEWORD_LEN);
+ } else {
+ /* All extra offset bits verbatim (at most 17). */
+ lzx_write_varbits(os, extra_bits, num_extra_bits, 17);
+ }
+
+ /* Advance to the next sequence. */
+ seq++;
+ }