#ifdef KERNEL_PRIVATE
#ifndef _I386_LOCK_H_
#define _I386_LOCK_H_
#include <sys/appleapiopts.h>
#ifdef __APPLE_API_PRIVATE
#ifdef MACH_KERNEL_PRIVATE
#include <kern/macro_help.h>
#include <kern/assert.h>
#include <i386/hw_lock_types.h>
#include <i386/locks.h>
#include <mach_rt.h>
#include <mach_ldebug.h>
/*
 * Mutex wrapper for the i386 lock layer: embeds the machine-independent
 * lck_mtx_t and, when lock debugging (MACH_LDEBUG) is enabled, extra
 * bookkeeping fields for the debugger.
 */
typedef struct {
lck_mtx_t lck_mtx;
#if MACH_LDEBUG
int type; /* NOTE(review): presumably set to MUTEX_TAG to identify a mutex -- confirm in lock implementation */
#define MUTEX_TAG 0x4d4d /* ASCII 'MM' */
vm_offset_t pc; /* NOTE(review): likely caller PC recorded at acquire time -- confirm */
vm_offset_t thread; /* NOTE(review): likely the owning thread -- confirm */
#endif
} mutex_t;
/*
 * Read/write lock for the i386 lock layer.  The bitfields share one
 * 32-bit word; 'interlock' is the simple lock that serializes updates
 * to them (field names suggest: count of active readers, upgrade/write
 * requests pending, waiters present, and whether blocking is allowed --
 * semantics live in the lock implementation, not visible here).
 */
typedef struct {
decl_simple_lock_data(,interlock)
volatile unsigned int
read_count:16,
want_upgrade:1,
want_write:1,
waiting:1,
can_sleep:1;
} lock_t;
/* Spin timeout used by the lock code; defined (and its units chosen) in the lock implementation, not visible here. */
extern unsigned int LockTimeOut;
#if defined(__GNUC__)
/*
 * bit_lock(bit, l): acquire a bit-level spin lock.
 * Test-and-test-and-set: first spin with a plain (unlocked) btl while
 * the bit is set (0:/jb 0b), then attempt an atomic "lock btsl" to set
 * it; if the bit was already set when the locked bts executed (CF=1),
 * go back to spinning.  The "memory" clobber keeps the compiler from
 * moving memory accesses across the acquire.
 */
#define bit_lock(bit,l) \
__asm__ volatile(" jmp 1f \n \
0: btl %0, %1 \n \
jb 0b \n \
1: lock \n \
btsl %0,%1 \n \
jb 0b" : \
: \
"r" (bit), "m" (*(volatile int *)(l)) : \
"memory");
/*
 * bit_unlock(bit, l): release a bit-level spin lock by atomically
 * clearing the bit with "lock btrl".
 * NOTE(review): unlike bit_lock there is no "memory" clobber here, so
 * the compiler is not told this is a release barrier; this matches the
 * long-standing original -- flagging, not changing.
 */
#define bit_unlock(bit,l) \
__asm__ volatile(" lock \n \
btrl %0,%1" : \
: \
"r" (bit), "m" (*(volatile int *)(l)));
#define i_bit_set(bit,l) \
__asm__ volatile(" lock \n \
btsl %0,%1" : \
: \
"r" (bit), "m" (*(volatile int *)(l)));
#define i_bit_clear(bit,l) \
__asm__ volatile(" lock \n \
btrl %0,%1" : \
: \
"r" (bit), "m" (*(volatile int *)(l)));
/*
 * i_bit_isset: test (non-atomically) whether bit 'test' is set in the
 * word at 'word'.  btl copies the tested bit into CF; "sbbl %0,%0"
 * then computes bit - bit - CF, yielding all-ones if the bit was set
 * and 0 otherwise.  Returns non-zero iff the bit is set.
 *
 * Fix: the memory operand must be the pointed-to word, "m" (*word).
 * The previous "m" (word) made btl test bits of the pointer variable
 * itself rather than the word it points to.
 */
static inline unsigned long i_bit_isset(unsigned int test, volatile unsigned long *word)
{
int bit;
__asm__ volatile("btl %2,%1\n\tsbbl %0,%0" : "=r" (bit)
: "m" (*word), "ir" (test));
return bit;
}
/* Forward declarations for the GCC inline-asm atomic primitives defined below. */
static inline char xchgb(volatile char * cp, char new);
static inline void atomic_incl(long * p, long delta);
static inline void atomic_incs(short * p, short delta);
static inline void atomic_incb(char * p, char delta);
static inline void atomic_decl(long * p, long delta);
static inline void atomic_decs(short * p, short delta);
static inline void atomic_decb(char * p, char delta);
static inline long atomic_getl(long * p);
static inline short atomic_gets(short * p);
static inline char atomic_getb(char * p);
static inline void atomic_setl(long * p, long value);
static inline void atomic_sets(short * p, short value);
static inline void atomic_setb(char * p, char value);
/*
 * xchgb: atomically exchange the byte at cp with 'new' and return the
 * previous byte.  xchg with a memory operand is implicitly locked on
 * x86, so no lock prefix is needed.
 */
static inline char xchgb(volatile char * cp, char new)
{
register char old = new;
__asm__ volatile (" xchgb %0,%2" :
"=q" (old) :
"0" (new), "m" (*(volatile char *)cp) : "memory");
return (old);
}
/*
 * atomic_cmpxchg: if *p == old, atomically store new into *p.
 * Returns 1 on success, 0 on failure: res is seeded with 'old' in
 * %eax ("+a"), "lock cmpxchgl" compares %eax against *p, and the
 * setz/movzbl pair turns the resulting ZF into 0/1.
 */
static inline uint32_t
atomic_cmpxchg(uint32_t *p, uint32_t old, uint32_t new)
{
uint32_t res = old;
asm volatile(
"lock; cmpxchgl %1,%2; \n\t"
" setz %%al; \n\t"
" movzbl %%al,%0"
: "+a" (res)
: "r" (new),
"m" (*(p))
: "memory");
return (res);
}
/*
 * atomic_load64: atomically read a 64-bit value on 32-bit x86, which
 * has no plain 8-byte load.  cmpxchg8b compares edx:eax (0) against
 * *quadp: on mismatch it loads the current value into edx:eax; on
 * match it stores ebx:ecx (also 0), i.e. rewrites the same zero.
 * Either way "=A" (the edx:eax pair) holds the current value.
 * i386-only: relies on the "A" register pair and cmpxchg8b.
 */
static inline uint64_t
atomic_load64(uint64_t *quadp)
{
uint64_t ret;
asm volatile(
" lock; cmpxchg8b %1"
: "=A" (ret)
: "m" (*quadp), "a" (0), "d" (0), "b" (0), "c" (0));
return (ret);
}
/*
 * atomic_loadstore64: atomic 64-bit exchange -- store 'new' into
 * *quadp and return the previous value.  The current value is read
 * (non-atomically) as the initial compare value; cmpxchg8b is then
 * retried until the compare succeeds.  On each failure cmpxchg8b
 * reloads edx:eax ("+A" ret) with the fresh value for the next try,
 * so on success 'ret' is the value that was replaced.  i386-only.
 */
static inline uint64_t
atomic_loadstore64(uint64_t *quadp, uint64_t new)
{
uint64_t ret;
ret = *quadp;
asm volatile(
"1: \n\t"
" lock; cmpxchg8b %1 \n\t"
" jnz 1b"
: "+A" (ret)
: "m" (*quadp),
"b" ((uint32_t)new), "c" ((uint32_t)(new >> 32)));
return (ret);
}
/*
 * atomic_incl: *p += delta with a locked addl ("inc" here means
 * increment by delta, not by one).
 * NOTE(review): the target is only an input "m" operand and there is
 * no "memory" clobber; matches the long-standing original -- flagging,
 * not changing.
 */
static inline void atomic_incl(long * p, long delta)
{
__asm__ volatile (" lock \n \
addl %0,%1" : \
: \
"r" (delta), "m" (*(volatile long *)p));
}
/* atomic_incs: *p += delta on a short, via locked addw. */
static inline void atomic_incs(short * p, short delta)
{
__asm__ volatile (" lock \n \
addw %0,%1" : \
: \
"q" (delta), "m" (*(volatile short *)p));
}
static inline void atomic_incb(char * p, char delta)
{
__asm__ volatile (" lock \n \
addb %0,%1" : \
: \
"q" (delta), "m" (*(volatile char *)p));
}
/* atomic_decl: *p -= delta with a locked subl. */
static inline void atomic_decl(long * p, long delta)
{
__asm__ volatile (" lock \n \
subl %0,%1" : \
: \
"r" (delta), "m" (*(volatile long *)p));
}
/*
 * atomic_decl_and_test: *p -= delta with a locked subl; returns 1 if
 * the result is zero (sete captures ZF), 0 otherwise.
 */
static inline int atomic_decl_and_test(long * p, long delta)
{
uint8_t ret;
asm volatile (
" lock \n\t"
" subl %1,%2 \n\t"
" sete %0"
: "=qm" (ret)
: "r" (delta), "m" (*(volatile long *)p));
return ret;
}
/* atomic_decs: *p -= delta on a short, via locked subw. */
static inline void atomic_decs(short * p, short delta)
{
__asm__ volatile (" lock \n \
subw %0,%1" : \
: \
"q" (delta), "m" (*(volatile short *)p));
}
/* atomic_decb: *p -= delta on a char, via locked subb. */
static inline void atomic_decb(char * p, char delta)
{
__asm__ volatile (" lock \n \
subb %0,%1" : \
: \
"q" (delta), "m" (*(volatile char *)p));
}
/*
 * atomic_getl: read the long at p.  A naturally aligned 32-bit load
 * is a single instruction on i386, so no lock prefix is needed.
 */
static inline long atomic_getl(long * p)
{
long current = p[0];
return current;
}
/* atomic_gets: read the short at p (single aligned load on i386). */
static inline short atomic_gets(short * p)
{
short current = p[0];
return current;
}
/* atomic_getb: read the char at p (single aligned load on i386). */
static inline char atomic_getb(char * p)
{
char current = p[0];
return current;
}
/* atomic_setl: store value into the long at p (single aligned store on i386). */
static inline void atomic_setl(long * p, long value)
{
p[0] = value;
}
/* atomic_sets: store value into the short at p (single aligned store on i386). */
static inline void atomic_sets(short * p, short value)
{
p[0] = value;
}
/* atomic_setb: store value into the char at p (single aligned store on i386). */
static inline void atomic_setb(char * p, char value)
{
p[0] = value;
}
#else
/*
 * Non-GCC compilers: the bit primitives above are provided as real
 * functions, implemented elsewhere, instead of inline asm macros.
 */
extern void i_bit_set(
int index,
void *addr);
extern void i_bit_clear(
int index,
void *addr);
extern void bit_lock(
int index,
void *addr);
extern void bit_unlock(
int index,
void *addr);
#endif
/* Check for (and service) a pending preemption point; implemented in the platform code. */
extern void kernel_preempt_check (void);
#endif
#endif
#endif
#endif