-	if (likely(is->next != is->begin))
-		is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
-			      << (sizeof(is->bitbuf) * 8 - is->bitsleft - 16);
-	is->bitsleft += 16;
+	/*
+	 * Fast path: on little-endian CPUs with fast unaligned access and a
+	 * 64-bit bitbuf, refill with a single unaligned 64-bit load instead
+	 * of one 16-bit word at a time.  'avail' is assumed to be the number
+	 * of free bits in bitbuf (TODO: confirm the caller computes it as
+	 * sizeof(is->bitbuf) * 8 - is->bitsleft, matching the removed lines).
+	 *
+	 * Backing up by avail/16 whole words means the 64-bit load may also
+	 * cover words at addresses >= the old is->next.  Because the stream
+	 * is read backwards, those words hold exactly the data already
+	 * occupying the top bits of bitbuf, so OR-ing them in again is a
+	 * no-op rather than corruption.
+	 */
+	if (UNALIGNED_ACCESS_IS_FAST && CPU_IS_LITTLE_ENDIAN &&
+	    WORDSIZE == 8 && likely((u8 *)is->next - (u8 *)is->begin >= 8))
+	{
+		/* Consume avail/16 whole 16-bit words with one load; word
+		 * next[-k] lands at bit position avail - 16*k, exactly as in
+		 * the slow path below. */
+		is->next -= avail >> 4;
+		is->bitbuf |= load_u64_unaligned(is->next) << (avail & 15);
+		is->bitsleft += avail & ~15;
+	} else {
+		/* Slow path: refill up to two 16-bit words individually,
+		 * stopping at the start of the buffer. */
+		if (likely(is->next != is->begin))
+			is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
+				      << (avail - 16);
+		if (likely(is->next != is->begin))
+			is->bitbuf |= (bitbuf_t)le16_to_cpu(*--is->next)
+				      << (avail - 32);
+		/*
+		 * NOTE(review): bitsleft is credited with 32 bits even when
+		 * fewer words were actually loaded (near is->begin);
+		 * presumably the decoder tolerates reading past the logical
+		 * end of the stream -- confirm against the callers.
+		 */
+		is->bitsleft += 32;
+	}