#include <vm/vm_kern.h>
#include <mach/machine.h>
#include <i386/cpu_threads.h>
#include <i386/cpuid.h>
#include <i386/machine_cpu.h>
#include <i386/lock.h>
/*
 * Enable use of HLT when a CPU is idle (default on).
 * NOTE(review): consumer not visible in this file -- presumably read by
 * the idle loop / machine_idle path; confirm against machine_cpu code.
 */
int idlehalt = 1;
/*
 * cpu_thread_init
 *
 * Called on each logical CPU as it comes up.  Binds the CPU to the
 * cpu_core_t for its physical core, allocating and publishing that
 * structure if this is the first thread of the core to arrive.
 * Publication is lockless: competing hyper-thread siblings race via
 * atomic_cmpxchg on the core slot and losers free their private copy.
 * Also maintains the machine_info physical/logical CPU counters.
 */
void
cpu_thread_init(void)
{
int my_cpu = get_cpu_number();
int my_core_base_cpu;
int ret;
cpu_core_t *my_core;
/* Already bound to a core structure -- nothing more to do. */
if (cpu_core())
return;
/*
 * With HyperThreading, sibling logical CPUs share one physical core;
 * map this CPU to the core's base CPU so all siblings select the same
 * core slot.  Without HTT each CPU is its own core.
 */
if (cpuid_features() & CPUID_FEATURE_HTT) {
my_core_base_cpu = cpu_to_core_cpu(my_cpu);
current_cpu_datap()->cpu_threadtype = CPU_THREADTYPE_INTEL_HTT;
} else {
my_core_base_cpu = my_cpu;
current_cpu_datap()->cpu_threadtype = CPU_THREADTYPE_NONE;
}
my_core = (cpu_core_t *) cpu_to_core(my_core_base_cpu);
if (my_core == NULL) {
/* First arrival for this core: allocate a candidate core structure. */
cpu_core_t *new_core;
ret = kmem_alloc(kernel_map,
(void *) &new_core, sizeof(cpu_core_t));
if (ret != KERN_SUCCESS)
panic("cpu_thread_init() kmem_alloc ret=%d\n", ret);
bzero((void *) new_core, sizeof(cpu_core_t));
new_core->base_cpu = my_core_base_cpu;
/*
 * Race to publish our allocation into the base CPU's core slot.
 * Only one thread's cmpxchg against the NULL slot succeeds: the
 * winner accounts a new physical CPU; losers free their copy and
 * adopt the winner's below.  (32-bit pointer cmpxchg -- this code
 * is i386-specific.)
 */
if (atomic_cmpxchg((uint32_t *) &cpu_to_core(my_core_base_cpu),
0, (uint32_t) new_core)) {
atomic_incl((long *) &machine_info.physical_cpu, 1);
atomic_incl((long *) &machine_info.physical_cpu_max, 1);
} else {
kmem_free(kernel_map,
(vm_offset_t)new_core, sizeof(cpu_core_t));
}
/* Re-read the slot: non-NULL now regardless of who won the race. */
my_core = (cpu_core_t *) cpu_to_core(my_core_base_cpu);
}
/* Point this logical CPU at its core and count it as a live thread. */
cpu_to_core(my_cpu) = (struct cpu_core *) my_core;
atomic_incl((long *) &my_core->active_threads, 1);
atomic_incl((long *) &my_core->num_threads, 1);
atomic_incl((long *) &machine_info.logical_cpu, 1);
atomic_incl((long *) &machine_info.logical_cpu_max, 1);
}
/*
 * cpu_thread_halt
 *
 * Take the calling logical CPU offline: remove it from its core's
 * thread accounting and from the machine-wide logical CPU count, then
 * halt the processor.  The *_max counters are deliberately left alone
 * so the high-water marks survive a CPU going away.  Does not return.
 */
void
cpu_thread_halt(void)
{
	cpu_core_t	*core = cpu_core();

	/* Drop this thread from its core's active and total counts. */
	atomic_decl((long *) &core->active_threads, 1);
	atomic_decl((long *) &core->num_threads, 1);
	/* One fewer logical CPU machine-wide. */
	atomic_decl((long *) &machine_info.logical_cpu, 1);

	cpu_halt();
	/* NOTREACHED */
}