#ifndef SB_ATOMIC_H_
#define SB_ATOMIC_H_

#include <sched.h>   /* sched_yield(), used by sb_spinlock() below */

/* Machine-word signed/unsigned counters used by the atomic helpers. */
typedef long                        sb_atomic_int_t;
typedef unsigned long               sb_atomic_uint_t;

/* Number of online CPUs (defined in a .c file).  Spinning only pays off
 * when another CPU can be running the lock holder concurrently. */
extern int sb_ncpu;

/* Lock word: volatile so polling reads are not cached in a register. */
typedef volatile sb_atomic_uint_t  sb_atomic_t;

/* Atomic compare-and-swap:
 * if (*lock == old) { *lock = set; return 1(true); } else return 0. */
#define sb_atomic_cmp_set(lock, old, set)                               \
    __sync_bool_compare_and_swap((lock), (old), (set))

/* Atomically add `add` to *value; returns the PREVIOUS value. */
#define sb_atomic_fetch_add(value, add)                                 \
    __sync_fetch_and_add((value), (add))

/* Full hardware + compiler memory barrier. */
#define sb_memory_barrier()                     __sync_synchronize()
/* 0: unlock status 1: lock status */
/*
 * Acquire the spinlock, blocking until it is held.
 * Lock word: 0 == free, 1 == held (see comment above).
 *
 * On multi-CPU hosts we retry the CAS with exponentially growing pause
 * loops in between to reduce cache-line ping-pong; on a single CPU
 * spinning cannot help (the holder is not running), so we go straight
 * to sched_yield() and let it make progress.
 */
static inline void sb_spinlock(sb_atomic_t *lock)
{
    unsigned long long        i, n;
    /* Cap on the backoff loop length; was a signed int, which forced a
     * signed/unsigned comparison against n. */
    const unsigned long long  spin = 60;

    for ( ;; ) {

        /* Cheap volatile read first: only issue the bus-locked CAS
         * when the lock looks free. */
        if (*lock == 0 && sb_atomic_cmp_set(lock, 0, 1)) {
            return;
        }

        if (sb_ncpu > 1) {
            for (n = 1; n < spin; n <<= 1) {

                /* Backoff delay.  The empty asm with a "memory" clobber
                 * keeps the optimizer from deleting the loop outright
                 * and forces *lock to be re-read afterwards; the bare
                 * `for (i = 0; i < n; i++) { }` of the original compiles
                 * to nothing at -O2. */
                for (i = 0; i < n; i++) {
                    __asm__ __volatile__ ("" ::: "memory");
                }

                if (*lock == 0 && sb_atomic_cmp_set(lock, 0, 1)) {
                    return;
                }
            }
        }

        /* Still contended: give up the CPU so the holder can run. */
        sched_yield();
    }
}

/*
 * Release the spinlock: transition 1 -> 0.
 *
 * The CAS fails harmlessly when the lock is already free, so the
 * original's unsynchronized `*lock == 0` pre-check added nothing and
 * only hid double-unlock bugs; it is removed.  The __sync CAS is a
 * full barrier, so the critical section's writes are published before
 * the lock word is cleared.
 */
static inline void sb_spinunlock(sb_atomic_t *lock)
{
    (void) sb_atomic_cmp_set(lock, 1, 0);
}

/* Initialize the lock word to the free state (0 == unlocked). */
static inline void sb_spininit(sb_atomic_t *lock)
{
    *lock = 0;
}

/*
 * Try to acquire the lock without blocking.
 * Returns non-zero on success, 0 if the lock is already held.
 */
static inline int sb_spintrylock(sb_atomic_t *lock)
{
    /* Cheap read first: skip the bus-locked CAS when the lock is
     * visibly held. */
    if (*lock != 0) {
        return 0;
    }

    return sb_atomic_cmp_set(lock, 0, 1);
}

#endif
