wimlib.net Git - wimlib/blobdiff - src/lzx_compress.c
mount_image.c: add fallback definitions of RENAME_* constants
[wimlib] / src / lzx_compress.c
index ac87a807bd9a3b726e891c99b1a0150ec2cba868..02498bf5f6b7b6cc51b6563a7b1fdf2892ffcaa0 100644 (file)
@@ -18,7 +18,7 @@
  * details.
  *
  * You should have received a copy of the GNU Lesser General Public License
- * along with this file; if not, see http://www.gnu.org/licenses/.
+ * along with this file; if not, see https://www.gnu.org/licenses/.
  */
 
 
 #include "wimlib/compress_common.h"
 #include "wimlib/compressor_ops.h"
 #include "wimlib/error.h"
-#include "wimlib/lz_extend.h"
 #include "wimlib/lzx_common.h"
 #include "wimlib/unaligned.h"
 #include "wimlib/util.h"
@@ -288,7 +287,7 @@ struct lzx_sequence {
        u32 adjusted_offset_and_mainsym;
 #define SEQ_MAINSYM_BITS       10
 #define SEQ_MAINSYM_MASK       (((u32)1 << SEQ_MAINSYM_BITS) - 1)
-} _aligned_attribute(8);
+} __attribute__((aligned(8)));
 
 /*
  * This structure represents a byte position in the input buffer and a node in
@@ -334,7 +333,7 @@ struct lzx_optimum_node {
 #  define OPTIMUM_GAP_MATCH 0x80000000
 #endif
 
-} _aligned_attribute(8);
+} __attribute__((aligned(8)));
 
 /* The cost model for near-optimal parsing */
 struct lzx_costs {
@@ -1227,7 +1226,7 @@ lzx_flush_block(struct lzx_compressor *c, struct lzx_output_bitstream *os,
  * but rather we combine many symbols into a single "observation type".  For
  * literals we only look at the high bits and low bits, and for matches we only
  * look at whether the match is long or not.  The assumption is that for typical
- * "real" data, places that are good block boundaries will tend to be noticable
+ * "real" data, places that are good block boundaries will tend to be noticeable
  * based only on changes in these aggregate frequencies, without looking for
  * subtle differences in individual symbols.  For example, a change from ASCII
  * bytes to non-ASCII bytes, or from few matches (generally less compressible)
@@ -1316,7 +1315,7 @@ lzx_should_end_block(struct lzx_block_split_stats *stats)
  */
 struct lzx_lru_queue {
        u64 R;
-} _aligned_attribute(8);
+} __attribute__((aligned(8)));
 
 #define LZX_QUEUE_OFFSET_SHIFT 21
 #define LZX_QUEUE_OFFSET_MASK  (((u64)1 << LZX_QUEUE_OFFSET_SHIFT) - 1)
@@ -2273,7 +2272,7 @@ lzx_compress_near_optimal(struct lzx_compressor * restrict c,
                        } else {
                                /* Don't search for matches at this position. */
                                CALL_BT_MF(is_16_bit, c,
-                                          bt_matchfinder_skip_position,
+                                          bt_matchfinder_skip_byte,
                                           in_begin,
                                           in_next - in_begin,
                                           nice_len,
@@ -2569,7 +2568,7 @@ lzx_compress_lazy(struct lzx_compressor * restrict c,
                        cur_len = CALL_HC_MF(is_16_bit, c,
                                             hc_matchfinder_longest_match,
                                             in_begin,
-                                            in_next - in_begin,
+                                            in_next,
                                             2,
                                             max_len,
                                             nice_len,
@@ -2646,7 +2645,7 @@ lzx_compress_lazy(struct lzx_compressor * restrict c,
                        next_len = CALL_HC_MF(is_16_bit, c,
                                              hc_matchfinder_longest_match,
                                              in_begin,
-                                             in_next - in_begin,
+                                             in_next,
                                              cur_len - 2,
                                              max_len,
                                              nice_len,
@@ -2707,13 +2706,14 @@ lzx_compress_lazy(struct lzx_compressor * restrict c,
                        lzx_choose_match(c, cur_len, cur_adjusted_offset,
                                         recent_offsets, is_16_bit,
                                         &litrunlen, &next_seq);
-                       in_next = CALL_HC_MF(is_16_bit, c,
-                                            hc_matchfinder_skip_positions,
-                                            in_begin,
-                                            in_next - in_begin,
-                                            in_end - in_begin,
-                                            skip_len,
-                                            next_hashes);
+                       CALL_HC_MF(is_16_bit, c,
+                                  hc_matchfinder_skip_bytes,
+                                  in_begin,
+                                  in_next,
+                                  in_end,
+                                  skip_len,
+                                  next_hashes);
+                       in_next += skip_len;
 
                        /* Keep going until it's time to end the block. */
                } while (in_next < in_max_block_end &&