#ifndef __BITOPS_H__
#define __BITOPS_H__

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. The test-and-modify operations return 0
 * if the bit was cleared before the operation and != 0 if it was not
 * (clear_bit() itself returns nothing).
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1) (this
 * assumes addr points to 32-bit longs).
 */

#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
/* Technically wrong, but this avoids compilation errors on some gcc
   versions. */
#define BITOP_ADDR(x) "=m" (*(volatile long *) (x))
#else
#define BITOP_ADDR(x) "+m" (*(volatile long *) (x))
#endif

#define ADDR        BITOP_ADDR(addr)

/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 *
 * NOTE(review): the bts carries no "lock" prefix, so the
 * read-modify-write is atomic only with respect to the current CPU
 * (e.g. interrupts), not other processors -- confirm this header is
 * intended for uniprocessor use if "atomic" is relied on for SMP.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
  int oldbit;

  /* bts copies the old bit into CF and sets it in memory;
     "sbb %0,%0" expands CF into 0 (was clear) or -1 (was set). */
  asm volatile("bts %2,%1\n\t"
         "sbb %0,%0" : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");

  return oldbit;
}

/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 *
 * NOTE(review): like test_and_set_bit(), the btr has no "lock" prefix,
 * so atomicity holds only against the current CPU, not other
 * processors -- confirm UP-only use.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
  int oldbit;

  /* btr copies the old bit into CF and clears it in memory;
     "sbb %0,%0" expands CF into 0 (was clear) or -1 (was set). */
  asm volatile("btr %2,%1\n\t"
         "sbb %0,%0"
         : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");

  return oldbit;
}

/**
 * fls - find last set bit in word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffs, but returns the position of the most significant set bit.
 *
 * fls(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 32.
 *
 * NOTE(review): cmovzl requires CMOV support (i686/P6 or later) --
 * confirm this matches the minimum supported CPU.
 */
static inline int fls(int x)
{
  int r;
  /* bsrl yields the 0-based index of the highest set bit and sets ZF
     when the source is 0; cmovzl then substitutes -1 in that case, so
     the final "+ 1" maps nonzero x to 1..32 and x == 0 to 0.  The
     earlyclobber ("=&r") keeps r out of the input operands' registers
     since it is written before %2 is read. */
  asm("bsrl %1,%0\n\t"
      "cmovzl %2,%0"
      : "=&r" (r) : "rm" (x), "rm" (-1));
  return r + 1;
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * Returns 0 if @x is 0, otherwise the 1-based position of the most
 * significant set bit (up to 64).  Tests the upper 32 bits first and
 * falls back to a plain fls() on the low half.
 */
static inline int fls64(unsigned long long x)
{
  unsigned long upper = x >> 32;

  return upper ? fls(upper) + 32 : fls(x);
}

/* True when gcc can prove nr is a compile-time constant. */
#define IS_IMMEDIATE(nr)    (__builtin_constant_p(nr))
/* Memory operand for the byte containing bit nr (nr >> 3 bytes past addr;
   void* arithmetic is a GNU extension treated as byte-sized). */
#define CONST_MASK_ADDR(nr, addr) BITOP_ADDR((void *)(addr) + ((nr)>>3))
/* Mask selecting bit nr within its byte. */
#define CONST_MASK(nr)      (1 << ((nr) & 7))

/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 *
 * NOTE(review): neither the andb nor the btr carries a "lock" prefix,
 * so the read-modify-write is atomic only with respect to the current
 * CPU -- confirm a uniprocessor target is intended.
 */
static inline void
clear_bit(int nr, volatile unsigned long *addr)
{
  if (IS_IMMEDIATE(nr)) {
    /* Constant bit number: clear it with one byte-wide "and" on the
       containing byte, using an immediate inverted mask. */
    asm volatile("andb %1,%0"
      : CONST_MASK_ADDR(nr, addr)
      : "iq" ((u8_t)~CONST_MASK(nr)));
  } else {
    /* Variable bit number: let btr do the bit addressing. */
    asm volatile("btr %1,%0"
      : BITOP_ADDR(addr)
      : "Ir" (nr));
  }
}

/**
 * ffz - find first zero bit in word
 * @word: The word to search
 *
 * Returns the 0-based index of the least significant zero bit.
 * Undefined if no zero exists, so code should check against ~0UL first
 * (bsf's destination is undefined when its source, ~word, is 0).
 */
static inline unsigned long ffz(unsigned long word)
{
  /* Invert the word so the first zero bit becomes the first set bit,
     then bsf returns its index. */
  asm("bsf %1,%0"
    : "=r" (word)
    : "r" (~word));
  return word;
}

/* ext2 bitmap helpers.  The lock argument is accepted for interface
   compatibility but not used by these implementations. */
#define ext2_set_bit_atomic(lock, nr, addr)     \
  test_and_set_bit((nr), (unsigned long *)(addr))
#define ext2_clear_bit_atomic(lock, nr, addr)     \
  test_and_clear_bit((nr), (unsigned long *)(addr))

/**
 * constant_test_bit - test a bit when @nr is a compile-time constant
 * @nr: bit number to test
 * @addr: address to count from
 *
 * Plain-C path used by the test_bit() macro for constant bit numbers.
 * Returns nonzero if the bit is set, 0 otherwise.
 *
 * Fix: read through @addr directly instead of casting away the
 * volatile qualifier.  The parameter is declared volatile, and the
 * old "(unsigned long *)addr" cast let the compiler cache or reorder
 * the load, defeating the point of taking a volatile pointer.
 */
static inline int constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
  return ((1UL << (nr % BITS_PER_LONG)) &
    (addr[nr / BITS_PER_LONG])) != 0;
}

/*
 * variable_test_bit - test a bit whose number is only known at run time
 * (the non-constant path of the test_bit() macro).  bt loads the
 * selected bit into CF; "sbb %0,%0" expands CF into 0 (clear) or
 * -1 (set), so the result is nonzero iff the bit was set.
 */
static inline int variable_test_bit(int nr, volatile const unsigned long *addr)
{
  int oldbit;

  asm volatile("bt %2,%1\n\t"
         "sbb %0,%0"
         : "=r" (oldbit)
         : "m" (*(unsigned long *)addr), "Ir" (nr));

  return oldbit;
}

/* test_bit - dispatch to the constant- or variable-index implementation
   depending on whether nr is a compile-time constant. */
#define test_bit(nr, addr)      \
  (__builtin_constant_p((nr))   \
   ? constant_test_bit((nr), (addr))  \
   : variable_test_bit((nr), (addr)))

#endif
