#ifndef __SYSDEP_LOCKS_H__
#define __SYSDEP_LOCKS_H__
/* Integer type wide enough to hold an object address; operand type
   for the compare-and-swap / release primitives below.  */
typedef size_t obj_addr_t;
#ifdef __arch64__
/* Atomically compare *ADDR with OLD and, if they are equal, store
   NEW_VAL into *ADDR.  Returns true iff the swap was performed.
   Uses the SPARC V9 `casx' instruction; the trailing membar keeps the
   CAS's store ordered before subsequent loads and stores.  */
inline static bool
compare_and_swap(volatile obj_addr_t *addr,
		 obj_addr_t old,
		 obj_addr_t new_val)
{
  /* `casx' writes the previous contents of *addr back into new_val:
     constraint "0" ties the new_val input to output operand %0.  */
  __asm__ __volatile__("casx [%2], %3, %0\n\t"
		       "membar #StoreLoad | #StoreStore"
		       : "=&r" (new_val)
		       : "0" (new_val), "r" (addr), "r" (old)
		       : "memory");
  /* After casx, new_val holds the old contents of *addr; equality with
     OLD means the store happened.  (Plain boolean expression instead of
     the redundant `? true : false' ternary.)  */
  return new_val == old;
}
/* Store NEW_VAL into *ADDR with release semantics: the membar forces
   all earlier stores (#StoreStore) and earlier loads (#LoadStore) to
   complete before the following store becomes visible.  */
inline static void
release_set(volatile obj_addr_t *addr, obj_addr_t new_val)
{
  __asm__ __volatile__("membar #StoreStore | #LoadStore" : : : "memory");
  *addr = new_val;
}
/* Compare-and-swap with release semantics.  Simply forwards to
   compare_and_swap: its casx + membar provide the ordering used here.
   NOTE(review): there is no explicit #StoreStore|#LoadStore barrier
   *before* the CAS; presumably this relies on SPARC TSO ordering of
   prior accesses against the CAS's store — confirm for RMO targets.  */
inline static bool
compare_and_swap_release(volatile obj_addr_t *addr,
obj_addr_t old,
obj_addr_t new_val)
{
return compare_and_swap(addr, old, new_val);
}
#else
/* Global byte spinlock guarding the pseudo-atomic sections on 32-bit
   SPARC (no hardware CAS): 0 = free, nonzero = held (ldstub stores 0xff).
   NOTE(review): identifiers beginning with `__' are reserved for the
   implementation; acceptable here only because this is a system header.  */
static unsigned char __cas_lock = 0;
/* Acquire the global __cas_lock, spinning until it is free.
   Test-and-test-and-set loop:
     1:  `ldstub' atomically fetches the lock byte into %0 and stores
         0xff; a fetched value of zero means the lock was free and is
         now ours, so branch to 3 (done).
     2:  otherwise spin with plain `ldub' reads (no bus-locking traffic)
         until the byte reads zero, then fall back to 1 and retry the
         atomic ldstub.
   The "memory" clobber makes this a compiler-level acquire barrier.  */
inline static void
__cas_start_atomic(void)
{
unsigned int tmp;
__asm__ __volatile__(
"1: ldstub [%1], %0\n"
" orcc %0, 0x0, %g0\n"
" be 3f\n"
" nop\n"
"2: ldub [%1], %0\n"
" orcc %0, 0x0, %g0\n"
" bne 2b\n"
" nop\n"
"3:" : "=&r" (tmp)
: "r" (&__cas_lock)
: "memory", "cc");
}
/* Release the global __cas_lock by storing %g0 (always zero) into the
   lock byte.  The "memory" clobber stops the compiler from sinking
   protected accesses past the release.  NOTE(review): no hardware
   barrier precedes the store — presumably SPARC V8 TSO makes one
   unnecessary; confirm for the targets this builds on.  */
inline static void
__cas_end_atomic(void)
{
__asm__ __volatile__(
"stb %%g0, [%0]"
:
: "r" (&__cas_lock)
: "memory");
}
/* Pseudo-atomic compare-and-swap for 32-bit SPARC: compare *ADDR with
   OLD and, if equal, store NEW_VAL.  Returns true iff the swap was
   performed.  Atomicity comes from bracketing the read-modify-write
   with the global __cas_lock spinlock rather than a CAS instruction.  */
inline static bool
compare_and_swap(volatile obj_addr_t *addr,
		 obj_addr_t old,
		 obj_addr_t new_val)
{
  bool swapped = false;

  __cas_start_atomic ();
  if (*addr == old)
    {
      *addr = new_val;
      swapped = true;
    }
  __cas_end_atomic ();

  return swapped;
}
/* Store NEW_VAL into *ADDR, preventing the compiler from reordering
   earlier memory accesses past the store (empty asm with a "memory"
   clobber acts as a compiler-only release barrier).
   NOTE(review): no hardware barrier — presumably SPARC V8 TSO provides
   the needed store ordering; confirm for the targets this builds on.  */
inline static void
release_set(volatile obj_addr_t *addr, obj_addr_t new_val)
{
  __asm__ __volatile__("" : : : "memory");
  *addr = new_val;
}
/* Compare-and-swap with release semantics (32-bit variant).  Forwards
   to the lock-protected compare_and_swap above; the spinlock's
   acquire/release bracketing supplies the ordering.  */
inline static bool
compare_and_swap_release(volatile obj_addr_t *addr,
obj_addr_t old,
obj_addr_t new_val)
{
return compare_and_swap(addr, old, new_val);
}
#endif
#endif