/*
 * Engage compiler specific rotate intrinsic function if
 * available.
 */
/*
 * NOTE(review): "__ROTATE_H" begins with a double underscore, which is an
 * identifier reserved for the implementation (C11 7.1.3).  Renaming the
 * guard to e.g. ROTATE_H — here and in the closing #endif comment at the
 * bottom of the file — would be strictly conforming.
 */
#ifndef __ROTATE_H
#define __ROTATE_H

/*
 * Select a 32-bit rotate-left implementation, preferring a compiler
 * intrinsic or inline assembly.  If no branch below matches (or PEDANTIC
 * is defined), ROL32 is left undefined here and a portable fallback macro
 * is provided further down.
 */
#undef ROL32
#ifndef PEDANTIC
/* MSVC: rotate-left intrinsic from the runtime library. */
#if defined(_MSC_VER)
#define ROL32(a, n) _lrotl(a, n)
/* Intel ICC: equivalent rotate-left intrinsic. */
#elif defined(__ICC)
#define ROL32(a, n) _rotl(a, n)
#elif defined(__GNUC__) && __GNUC__ >= 2 && !defined(OPENSSL_NO_ASM) && \
    !defined(OPENSSL_NO_INLINE_ASM)
/*
 * Some GNU C inline assembler templates. Note that these
 * are rotates by *constant* number of bits! But that's
 * exactly what we need here... <appro@fy.chalmers.se>
 */
/*
 * These use GNU statement expressions ({ ... }) so the macro yields a
 * value.  In every template the "I" constraint requires 'n' to be a
 * compile-time immediate; passing a runtime-variable count will fail to
 * compile.  The 'register' hint is harmless and ignored by modern GCC.
 */
#if defined(__i386) || defined(__i386__) || defined(__x86_64) || \
    defined(__x86_64__)
/*
 * x86/x86-64 'roll': "0" ties the input operand to the output register,
 * and "cc" declares that the instruction clobbers the flags.
 */
#define ROL32(a, n)                                                            \
    ({                                                                         \
        register unsigned int ret;                                             \
        asm("roll %1,%0" : "=r"(ret) : "I"(n), "0"((unsigned int)(a)) : "cc"); \
        ret;                                                                   \
    })
#elif defined(_ARCH_PPC) || defined(_ARCH_PPC64) || defined(__powerpc) || \
    defined(__ppc__) || defined(__powerpc64__)
/*
 * PowerPC 'rlwinm' (rotate left word immediate then AND with mask): the
 * 0,31 mask bounds select the full 32-bit word, i.e. a plain rotate.
 */
#define ROL32(a, n)                                               \
    ({                                                            \
        register unsigned int ret;                                \
        asm("rlwinm %0,%1,%2,0,31" : "=r"(ret) : "r"(a), "I"(n)); \
        ret;                                                      \
    })
#elif defined(__s390x__)
/* s390x 'rll' (rotate left single logical) with an immediate count. */
#define ROL32(a, n)                                       \
    ({                                                    \
        register unsigned int ret;                        \
        asm("rll %0,%1,%2" : "=r"(ret) : "r"(a), "I"(n)); \
        ret;                                              \
    })
#endif /* CPU dispatch */
#endif /* compiler dispatch */
#endif /* PEDANTIC */

/*
 * Portable fallback: rotate the low 32 bits of 'a' left by 'n'.
 *
 * Both shift counts are reduced mod 32 so that n == 0 (and n == 32) is
 * well defined: the previous "(32 - (n))" form shifted a 32-bit operand
 * by 32 when n == 0, which is undefined behaviour (C11 6.5.7p3).  For
 * n in 1..31 the masked counts are identical to the unmasked ones, so
 * all previously-defined results are unchanged.
 *
 * 'a' is expected to carry an unsigned 32-bit quantity; as in the
 * original, the 0xffffffffU mask keeps any bits above bit 31 of a wider
 * type out of the right-shifted half.
 */
#ifndef ROL32
#define ROL32(a, n) \
    (((a) << ((n)&31)) | (((a)&0xffffffffU) >> ((32 - (n)) & 31)))
#endif

#endif  // __ROTATE_H