
/*
 * Fallback for an unsupported cmpxchg operand size.  Deliberately a no-op;
 * the caller then returns 0 as the "old" value.
 * NOTE(review): __arch_xchg's comment suggests the invalid-size path should
 * be a link-time error (kernel style: leave the symbol undefined), but this
 * empty definition silently succeeds instead — confirm which is intended.
 */
static void __bad_cmpxchg(volatile void *ptr, int size)
{
    (void)ptr;  /* suppress -Wunused-parameter */
    (void)size;
}

/*
 * Fallback for an unsupported xchg operand size.  Deliberately a no-op;
 * the caller then returns 0.
 * NOTE(review): the call site's comment promises a link-time error, but an
 * empty definition can never produce one — either leave the symbol
 * undeclared (kernel style) or drop that comment; confirm which is wanted.
 */
static void __bad_xchg(volatile void *ptr, int size)
{
    (void)ptr;  /* suppress -Wunused-parameter */
    (void)size;
}

/*
 * cmpxchg: plain ARMv6 only provides 32-bit exclusives (ldrex/strex);
 * the byte/halfword variants used below (ldrexb/ldrexh) require ARMv6K
 * or later.
 */

/*
 * Atomic compare-and-exchange.
 *
 * Atomically compares the value at @ptr with @old and, if they match,
 * stores @new there.  The ldrex/strex pair is retried until the exclusive
 * store succeeds, i.e. until no other observer touched the location
 * between the load and the store.
 *
 * @ptr:  address of the operand (must be naturally aligned for @size)
 * @old:  expected current value
 * @new:  value to store when the comparison succeeds
 * @size: operand width in bytes (1, 2 or 4)
 *
 * Returns the value previously held at @ptr (equals @old on a successful
 * exchange).  Unsupported sizes call __bad_cmpxchg() and return 0.
 */
unsigned long __arch_cmpxchg(volatile void *ptr, unsigned long old,
                        unsigned long new, int size)
{
    unsigned long oldval, res;

    switch (size)
    {
    case 1:
        do
        {
            __asm__ volatile("@ __cmpxchg1\n"
                             "	ldrexb	%1, [%2]\n"
                             "	mov	%0, #0\n"
                             "	teq	%1, %3\n"
                             "	it eq\n"
                             "	strexbeq %0, %4, [%2]\n"
                             : "=&r"(res), "=&r"(oldval)
                             : "r"(ptr), "Ir"(old), "r"(new)
                             : "memory", "cc");
        } while (res); /* strex writes non-zero when the reservation was lost */
        break;
    case 2:
        do
        {
            /* Fixed: asm comment was mislabelled "__cmpxchg1" */
            __asm__ volatile("@ __cmpxchg2\n"
                             "	ldrexh	%1, [%2]\n"
                             "	mov	%0, #0\n"
                             "	teq	%1, %3\n"
                             "	it eq\n"
                             "	strexheq %0, %4, [%2]\n"
                             : "=&r"(res), "=&r"(oldval)
                             : "r"(ptr), "Ir"(old), "r"(new)
                             : "memory", "cc");
        } while (res);
        break;
    case 4:
        do
        {
            __asm__ volatile("@ __cmpxchg4\n"
                             "	ldrex	%1, [%2]\n"
                             "	mov	%0, #0\n"
                             "	teq	%1, %3\n"
                             "	it eq\n"
                             "	strexeq %0, %4, [%2]\n"
                             : "=&r"(res), "=&r"(oldval)
                             : "r"(ptr), "Ir"(old), "r"(new)
                             : "memory", "cc");
        } while (res);
        break;
    default:
        /* Unsupported operand width */
        __bad_cmpxchg(ptr, size);
        oldval = 0;
        break;
    }

    return oldval;
}

/*
 * Atomic exchange.
 *
 * Unconditionally stores @x at @ptr and returns the value that was there
 * before.  Each size variant loops on an ldrex/strex pair until the
 * exclusive store succeeds.
 *
 * @ptr:  address of the operand (must be naturally aligned for @size)
 * @x:    new value to store
 * @size: operand width in bytes (1, 2 or 4)
 *
 * Returns the previous value at @ptr; unsupported sizes call __bad_xchg()
 * and return 0.
 */
unsigned long __arch_xchg(unsigned long x, volatile void *ptr, int size)
{
    unsigned long ret;  /* value read from *ptr before the store */
    unsigned int tmp;   /* strex status: non-zero means retry */

    switch (size)
    {
    case 1:
        asm volatile("@	__xchg1\n"
                     "1:	ldrexb	%0, [%3]\n"
                     "	strexb	%1, %2, [%3]\n"
                     "	teq	%1, #0\n"
                     "	bne	1b"
                     : "=&r"(ret), "=&r"(tmp)
                     : "r"(x), "r"(ptr)
                     : "memory", "cc");
        break;
    case 2:
        asm volatile("@	__xchg2\n"
                     "1:	ldrexh	%0, [%3]\n"
                     "	strexh	%1, %2, [%3]\n"
                     "	teq	%1, #0\n"
                     "	bne	1b"
                     : "=&r"(ret), "=&r"(tmp)
                     : "r"(x), "r"(ptr)
                     : "memory", "cc");
        break;
    case 4:
        asm volatile("@	__xchg4\n"
                     "1:	ldrex	%0, [%3]\n"
                     "	strex	%1, %2, [%3]\n"
                     "	teq	%1, #0\n"
                     "	bne	1b"
                     : "=&r"(ret), "=&r"(tmp)
                     : "r"(x), "r"(ptr)
                     : "memory", "cc");
        break;
    default:
        /* Cause a link-time error, the xchg() size is not supported */
        __bad_xchg(ptr, size);
        ret = 0;
        break;
    }

    return ret;
}
