#include <vm/vm_kern.h>
#include <mach/machine.h>
#include <i386/cpu_threads.h>
#include <i386/cpuid.h>
#include <i386/machine_cpu.h>
#include <i386/lock.h>
#include <i386/perfmon.h>
int idlehalt = 1;	/* Tunable; nonzero by default — presumably permits halting idle cpus. NOTE(review): confirm against the idle-loop code that reads this. */
/*
 * Report whether the running processor package exposes hyperthreads.
 * The HTT feature bit alone is not sufficient: multi-core packages
 * also set it, so we additionally require more logical processors
 * than cores per package.
 */
static boolean_t
cpu_is_hyperthreaded(void)
{
	/* No HTT feature bit => certainly no hyperthreads. */
	if ((cpuid_features() & CPUID_FEATURE_HTT) == 0)
		return FALSE;

	return (cpuid_info()->cpuid_logical_per_package /
		cpuid_info()->cpuid_cores_per_package) > 1;
}
/*
 * Find or allocate the cpu_core_t shared by the threads of a core.
 * On hyperthreaded parts all logical cpus of one core key off the
 * core's base cpu number; otherwise each cpu is its own core.
 * Also tags the cpu's thread type and bumps the max-cpu counters.
 * Returns the (possibly freshly allocated) core structure.
 */
void *
cpu_thread_alloc(int cpu)
{
	cpu_core_t	*core;
	int		base;
	int		kr;

	/* Pick the cpu number that indexes the shared core structure. */
	if (!cpu_is_hyperthreaded()) {
		cpu_datap(cpu)->cpu_threadtype = CPU_THREADTYPE_NONE;
		base = cpu;
	} else {
		cpu_datap(cpu)->cpu_threadtype = CPU_THREADTYPE_INTEL_HTT;
		base = cpu_to_core_cpu(cpu);
	}

	core = (cpu_core_t *) cpu_to_core(base);
	if (core == NULL) {
		/*
		 * First thread of this core to get here: allocate and
		 * zero the shared structure, and account for one more
		 * physical cpu.
		 */
		kr = kmem_alloc(kernel_map,
				(void *) &core, sizeof(cpu_core_t));
		if (kr != KERN_SUCCESS)
			panic("cpu_thread_alloc() kmem_alloc ret=%d\n", kr);
		bzero((void *) core, sizeof(cpu_core_t));
		core->base_cpu = base;
		atomic_incl((long *) &machine_info.physical_cpu_max, 1);
		/* Per-core performance-monitoring state. */
		core->pmc = pmc_alloc();
	}

	/* Every caller represents one more logical cpu. */
	atomic_incl((long *) &machine_info.logical_cpu_max, 1);

	return (void *) core;
}
/*
 * Per-cpu thread accounting at cpu start-up: bump the active-thread
 * and logical-cpu counts, and count the physical cpu when this is the
 * first thread up on its core.  The master cpu allocates its own core
 * structure here since nothing else has done so for it.
 */
void
cpu_thread_init(void)
{
	int		cpu_num = get_cpu_number();
	cpu_core_t	*core;

	if (cpu_num == master_cpu)
		cpu_to_core(master_cpu) = cpu_thread_alloc(master_cpu);

	core = cpu_core();
	if (core == NULL)
		panic("cpu_thread_init() no core allocated for cpu %d", cpu_num);

	atomic_incl((long *) &core->active_threads, 1);
	atomic_incl((long *) &machine_info.logical_cpu, 1);
	/* First thread up on this core accounts for the physical cpu. */
	if (core->num_threads == 0)
		atomic_incl((long *) &machine_info.physical_cpu, 1);
	atomic_incl((long *) &core->num_threads, 1);
}
/*
 * Per-cpu thread accounting at shutdown: reverse of cpu_thread_init().
 * Drops the logical-cpu and active-thread counts, retires the physical
 * cpu when the last thread of the core goes down, then halts the cpu.
 */
void
cpu_thread_halt(void)
{
	cpu_core_t	*core = cpu_core();

	atomic_decl((long *) &machine_info.logical_cpu, 1);
	atomic_decl((long *) &core->active_threads, 1);
	/* Last thread out of the core takes the physical cpu with it. */
	if (atomic_decl_and_test((long *) &core->num_threads, 1))
		atomic_decl((long *) &machine_info.physical_cpu, 1);

	cpu_halt();
	/* NOTREACHED */
}