6#include "force_inline.h"
/*
 * Compile-time little-endian detection: prefer the compiler-provided
 * __BYTE_ORDER__ macros, then fall back to per-architecture macros
 * (GCC/Clang ARM/MIPS/AArch64/x86 and the MSVC _M_* family).
 * When defined, the load/store helpers below use a single memcpy
 * instead of assembling/emitting bytes one at a time.
 */
#if (defined(__BYTE_ORDER__) && \
     (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)) || \
    defined(__LITTLE_ENDIAN__) || defined(__ARMEL__) || defined(__MIPSEL__) || \
    defined(__AARCH64EL__) || defined(__amd64__) || defined(__i386__) || \
    defined(_M_IX86) || defined(_M_X64) || defined(_M_AMD64) || \
    defined(_M_ARM)
#define NATIVE_LITTLE_ENDIAN
#endif
/*
 * Load a 32-bit little-endian value from a possibly-unaligned buffer.
 *
 * src must point to at least 4 readable bytes. memcpy (rather than a
 * pointer cast) avoids strict-aliasing and misaligned-access UB; on
 * non-little-endian targets the bytes are assembled explicitly so the
 * result is identical regardless of host byte order.
 */
static FORCE_INLINE uint32_t load32(
    const void* src) {
#if defined(NATIVE_LITTLE_ENDIAN)
    uint32_t w;
    memcpy(&w, src,
           sizeof w);
    return w;
#else
    const uint8_t* p = (
        const uint8_t*)src;
    uint32_t w = (uint32_t)(*p++);
    w |= (uint32_t)(*p++) << 8;
    w |= (uint32_t)(*p++) << 16;
    w |= (uint32_t)(*p++) << 24;
    return w;
#endif
}
/*
 * Load a 64-bit value from a possibly-unaligned buffer in HOST byte
 * order (no endian conversion). memcpy sidesteps alignment and
 * strict-aliasing UB; callers wanting a defined wire order should use
 * load64() instead.
 */
static FORCE_INLINE uint64_t load64_native(
    const void* src) {
    uint64_t w;
    memcpy(&w, src,
           sizeof w);
    return w;
}
/*
 * Load a 64-bit little-endian value from a possibly-unaligned buffer.
 *
 * src must point to at least 8 readable bytes. On little-endian targets
 * this is a single native load via load64_native(); elsewhere the bytes
 * are assembled explicitly so the result is the same on any host.
 */
static FORCE_INLINE uint64_t load64(
    const void* src) {
#if defined(NATIVE_LITTLE_ENDIAN)
    return load64_native(src);
#else
    const uint8_t* p = (
        const uint8_t*)src;
    uint64_t w = (uint64_t)(*p++);
    w |= (uint64_t)(*p++) << 8;
    w |= (uint64_t)(*p++) << 16;
    w |= (uint64_t)(*p++) << 24;
    w |= (uint64_t)(*p++) << 32;
    w |= (uint64_t)(*p++) << 40;
    w |= (uint64_t)(*p++) << 48;
    w |= (uint64_t)(*p++) << 56;
    return w;
#endif
}
/*
 * Store a 32-bit value to a possibly-unaligned buffer in little-endian
 * byte order. dst must point to at least 4 writable bytes. On
 * little-endian targets this is a single memcpy; elsewhere each byte is
 * written explicitly (the parameter copy of w is consumed by shifting).
 */
static FORCE_INLINE
void store32(
    void* dst, uint32_t w) {
#if defined(NATIVE_LITTLE_ENDIAN)
    memcpy(dst, &w,
           sizeof w);
#else
    uint8_t* p = (uint8_t*)dst;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
#endif
}
/*
 * Store a 64-bit value to a possibly-unaligned buffer in HOST byte
 * order (no endian conversion). memcpy sidesteps alignment and
 * strict-aliasing UB; callers wanting a defined wire order should use
 * store64() instead.
 */
static FORCE_INLINE
void store64_native(
    void* dst, uint64_t w) {
    memcpy(dst, &w,
           sizeof w);
}
/*
 * Store a 64-bit value to a possibly-unaligned buffer in little-endian
 * byte order. dst must point to at least 8 writable bytes. On
 * little-endian targets this delegates to store64_native(); elsewhere
 * each byte is written explicitly (the parameter copy of w is consumed
 * by shifting).
 */
static FORCE_INLINE
void store64(
    void* dst, uint64_t w) {
#if defined(NATIVE_LITTLE_ENDIAN)
    store64_native(dst, w);
#else
    uint8_t* p = (uint8_t*)dst;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
    w >>= 8;
    *p++ = (uint8_t)w;
#endif
}