X-Git-Url: https://git.distorted.org.uk/~mdw/mLib/blobdiff_plain/897c58ad7408d8001e63fcc2ca8e598c9811a539..a23bab96cb9eb7a869eb260336e5837e9c63d69b:/utils/bits.h

diff --git a/utils/bits.h b/utils/bits.h
index 6602e43..656eee5 100644
--- a/utils/bits.h
+++ b/utils/bits.h
@@ -40,6 +40,10 @@
 #  include <stdint.h>
 #endif
 
+#ifndef MLIB_COMPILER_H
+#  include "compiler.h"
+#endif
+
 /*----- Decide on some types ----------------------------------------------*/
 
 /* --- Make GNU C shut up --- */
@@ -314,8 +318,178 @@ typedef unsigned char octet, uint8;
   } while (0)
 #endif
 
+/* --- Endianness swapping --- */
+
+#if GCC_VERSION_P(4, 8)
+#  define ENDSWAP16(x) ((uint16)__builtin_bswap16(x))
+#endif
+#if GCC_VERSION_P(4, 3)
+#  define ENDSWAP32(x) ((uint32)__builtin_bswap32(x))
+#endif
+#if GCC_VERSION_P(4, 3) && defined(HAVE_UINT64)
+#  define ENDSWAP64(x) ((uint64)__builtin_bswap64(x))
+#endif
+
+#ifndef ENDSWAP8
+#  define ENDSWAP8(x) U8(x)
+#endif
+#ifndef ENDSWAP16
+#  define ENDSWAP16(x) \
+	((((uint16)(x) >> 8)&0xff) | \
+	 (((uint16)(x)&0xff) << 8))
+#endif
+#ifndef ENDSWAP24
+#  define ENDSWAP24(x) \
+	((((uint24)(x) >> 16)&0xff) | \
+	 ((uint24)(x)&0xff00) | \
+	 ((uint24)((x)&0xff) << 16))
+#endif
+#ifndef ENDSWAP32
+#  define ENDSWAP32(x) \
+	(ENDSWAP16(((uint32)(x) >> 16)&0xffff) | \
+	 ((uint32)ENDSWAP16((x)&0xffff) << 16))
+#endif
+#if defined(HAVE_UINT64) && !defined(ENDSWAP64)
+#  define ENDSWAP64(x) \
+	(ENDSWAP32(((uint64)(x) >> 32)&0xffffffff) | \
+	 ((uint64)ENDSWAP32((x)&0xffffffff) << 32))
+#endif
+#ifdef HAVE_UINT64
+#  define ENDSWAP64_(z, x) \
+	((z).i = ENDSWAP64((x).i))
+#else
+#  define ENDSWAP64_(z, x) \
+	((z).lo = ENDSWAP32((x).hi), \
+	 (z).hi = ENDSWAP32((x).lo))
+#endif
+
+#define MLIB_LITTLE_ENDIAN 1234
+#define MLIB_BIG_ENDIAN 4321
+#if defined(__ORDER_LITTLE_ENDIAN__) && \
+	__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+#  define MLIB_BYTE_ORDER MLIB_LITTLE_ENDIAN
+#elif defined(__ORDER_BIG_ENDIAN__) && \
+	__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+#  define MLIB_BYTE_ORDER MLIB_BIG_ENDIAN
+#endif
+
+#if MLIB_BYTE_ORDER == MLIB_LITTLE_ENDIAN
+#  define HTOL16(x) (x)
+#  define LTOH16(x) (x)
+#  define HTOB16(x) ENDSWAP16(x)
+#  define BTOH16(x) ENDSWAP16(x)
+#  define HTOL24(x) (x)
+#  define LTOH24(x) (x)
+#  define HTOB24(x) ENDSWAP24(x)
+#  define BTOH24(x) ENDSWAP24(x)
+#  define HTOL32(x) (x)
+#  define LTOH32(x) (x)
+#  define HTOB32(x) ENDSWAP32(x)
+#  define BTOH32(x) ENDSWAP32(x)
+#  ifdef HAVE_UINT64
+#    define HTOL64(x) (x)
+#    define LTOH64(x) (x)
+#    define HTOB64(x) ENDSWAP64(x)
+#    define BTOH64(x) ENDSWAP64(x)
+#  endif
+#  define HTOL64_(z, x) ASSIGN64(z, x)
+#  define LTOH64_(z, x) ASSIGN64(z, x)
+#  define HTOB64_(z, x) ENDSWAP64_(z, x)
+#  define BTOH64_(z, x) ENDSWAP64_(z, x)
+#elif MLIB_BYTE_ORDER == MLIB_BIG_ENDIAN
+#  define HTOL16(x) ENDSWAP16(x)
+#  define LTOH16(x) ENDSWAP16(x)
+#  define HTOB16(x) (x)
+#  define BTOH16(x) (x)
+#  define HTOL24(x) ENDSWAP24(x)
+#  define LTOH24(x) ENDSWAP24(x)
+#  define HTOB24(x) (x)
+#  define BTOH24(x) (x)
+#  define HTOL32(x) ENDSWAP32(x)
+#  define LTOH32(x) ENDSWAP32(x)
+#  define HTOB32(x) (x)
+#  define BTOH32(x) (x)
+#  ifdef HAVE_UINT64
+#    define HTOL64(x) ENDSWAP64(x)
+#    define LTOH64(x) ENDSWAP64(x)
+#    define HTOB64(x) (x)
+#    define BTOH64(x) (x)
+#  endif
+#  define HTOL64_(z, x) ENDSWAP64_(z, x)
+#  define LTOH64_(z, x) ENDSWAP64_(z, x)
+#  define HTOB64_(z, x) ASSIGN64(z, x)
+#  define BTOH64_(z, x) ASSIGN64(z, x)
+#endif
+
+/* --- Unaligned access (GCC-specific) --- */
+
+#if GCC_VERSION_P(3, 3) && CHAR_BIT == 8
+#  define MLIB_MISALIGNED __attribute__((aligned(1), may_alias))
+#  if __SIZEOF_SHORT__ == 2
+     typedef MLIB_MISALIGNED unsigned short misaligned_uint16;
+#    define RAW16(p) (*(misaligned_uint16 *)(p))
+#  endif
+#  if __SIZEOF_INT__ == 4
+     typedef MLIB_MISALIGNED unsigned int misaligned_uint32;
+#    define RAW32(p) (*(misaligned_uint32 *)(p))
+#  elif __SIZEOF_LONG__ == 4
+     typedef MLIB_MISALIGNED unsigned long misaligned_uint32;
+#    define RAW32(p) (*(misaligned_uint32 *)(p))
+#  endif
+#  if __SIZEOF_LONG__ == 8
+     typedef MLIB_MISALIGNED unsigned long misaligned_uint64;
+#    define RAW64(p) (*(misaligned_uint64 *)(p))
+#  elif __SIZEOF_LONG_LONG__ == 8
+     typedef MLIB_MISALIGNED unsigned long long misaligned_uint64;
+#    define RAW64(p) (*(misaligned_uint64 *)(p))
+#  endif
+#endif
+
 /* --- Storage and retrieval --- */
 
+#if defined(RAW16) && defined(LTOH16)
+#  define LOAD16_L(p) LTOH16(RAW16(p))
+#endif
+#if defined(RAW16) && defined(HTOL16)
+#  define STORE16_L(p, x) (RAW16(p) = HTOL16(x))
+#endif
+#if defined(RAW16) && defined(BTOH16)
+#  define LOAD16_B(p) BTOH16(RAW16(p))
+#endif
+#if defined(RAW16) && defined(HTOB16)
+#  define STORE16_B(p, x) (RAW16(p) = HTOB16(x))
+#endif
+
+#if defined(RAW32) && defined(LTOH32)
+#  define LOAD32_L(p) LTOH32(RAW32(p))
+#endif
+#if defined(RAW32) && defined(HTOL32)
+#  define STORE32_L(p, x) (RAW32(p) = HTOL32(x))
+#endif
+#if defined(RAW32) && defined(BTOH32)
+#  define LOAD32_B(p) BTOH32(RAW32(p))
+#endif
+#if defined(RAW32) && defined(HTOB32)
+#  define STORE32_B(p, x) (RAW32(p) = HTOB32(x))
+#endif
+
+#if defined(RAW64) && defined(LTOH64)
+#  define LOAD64_L(p) LTOH64(RAW64(p))
+#endif
+#if defined(RAW64) && defined(HTOL64)
+#  define STORE64_L(p, x) (RAW64(p) = HTOL64(x))
+#endif
+#if defined(RAW64) && defined(BTOH64)
+#  define LOAD64_B(p) BTOH64(RAW64(p))
+#endif
+#if defined(RAW64) && defined(HTOB64)
+#  define STORE64_B(p, x) (RAW64(p) = HTOB64(x))
+#endif
+
 #define GETBYTE(p, o) (((octet *)(p))[o] & MASK8)
 #define PUTBYTE(p, o, v) (((octet *)(p))[o] = U8((v)))
@@ -429,19 +603,19 @@ typedef unsigned char octet, uint8;
 
 #else
 
 #  define LOAD64_B_(d, p) \
-	((d).hi = LOAD32_B((octet *)(p) + 0), \
-	 (d).lo = LOAD32_B((octet *)(p) + 4))
+	((d).hi = LOAD32_B((octet *)(p) + 0), \
+	 (d).lo = LOAD32_B((octet *)(p) + 4))
 #  define LOAD64_L_(d, p) \
-	((d).lo = LOAD32_L((octet *)(p) + 0), \
-	 (d).hi = LOAD32_L((octet *)(p) + 4))
+	((d).lo = LOAD32_L((octet *)(p) + 0), \
+	 (d).hi = LOAD32_L((octet *)(p) + 4))
 #  define LOAD64_(d, p) LOAD64_B_((d), (p))
 #  define STORE64_B_(p, v) \
-	(STORE32_B((octet *)(p) + 0, (v).hi), \
-	 STORE32_B((octet *)(p) + 4, (v).lo))
+	(STORE32_B((octet *)(p) + 0, (v).hi), \
+	 STORE32_B((octet *)(p) + 4, (v).lo))
 #  define STORE64_L_(p, v) \
-	(STORE32_L((octet *)(p) + 0, (v).lo), \
-	 STORE32_L((octet *)(p) + 4, (v).hi))
+	(STORE32_L((octet *)(p) + 0, (v).lo), \
+	 STORE32_L((octet *)(p) + 4, (v).hi))
 #  define STORE64_(p, v) STORE64_B_((p), (v))
 #endif
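
Usage note (not part of the patch): the sketch below shows how the LOAD/STORE
and ENDSWAP macros touched by this change are meant to be called from client
code.  It is a minimal example under the assumption that the header is
installed as <mLib/bits.h>; the buffer contents are made up for illustration,
and on compilers without the fast RAW16/RAW32/RAW64 paths the same macro names
fall back to the portable byte-by-byte definitions later in bits.h.

/* endian-demo.c -- illustrative only; assumes <mLib/bits.h> is installed */
#include <stdio.h>
#include <mLib/bits.h>

int main(void)
{
  octet buf[4] = { 0x78, 0x56, 0x34, 0x12 };
  uint32 x, y;

  x = LOAD32_L(buf);            /* little-endian load: x == 0x12345678 */
  y = ENDSWAP32(x);             /* byte-swapped copy: y == 0x78563412 */
  STORE32_B(buf, x);            /* big-endian store: buf == 12 34 56 78 */

  printf("x = %08lx, y = %08lx, buf[0] = %02x\n",
         (unsigned long)x, (unsigned long)y, buf[0]);
  return (0);
}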