#import "auto_weak.h"
static void append_referrer_no_lock(weak_referrer_array_t *list, void **new_referrer, auto_weak_callback_block_t *new_block);
// Pointer hash in the style of Bob Jenkins' 96-bit (or 192-bit) mix:
// three accumulators are repeatedly subtracted and xor-shifted so that
// nearby addresses scatter across the table. Returns the third accumulator.
static inline uintptr_t hash(const void *key)
{
    uintptr_t k = (uintptr_t)key;

    // Seed constants (ASCII "Christop"/"her Kane" on LP64).
#if __LP64__
    uintptr_t x = 0x4368726973746F70ULL;
    uintptr_t y = 0x686572204B616E65ULL;
#else
    uintptr_t x = 0x4B616E65UL;
    uintptr_t y = 0x4B616E65UL;
#endif
    uintptr_t z = 1;

    x += k;

    // Four (LP64) or three (ILP32) rounds of the mixing schedule; the shift
    // amounts are part of the hash definition and must not be altered.
#if __LP64__
    x -= y; x -= z; x ^= (z >> 43);
    y -= z; y -= x; y ^= (x << 9);
    z -= x; z -= y; z ^= (y >> 8);
    x -= y; x -= z; x ^= (z >> 38);
    y -= z; y -= x; y ^= (x << 23);
    z -= x; z -= y; z ^= (y >> 5);
    x -= y; x -= z; x ^= (z >> 35);
    y -= z; y -= x; y ^= (x << 49);
    z -= x; z -= y; z ^= (y >> 11);
    x -= y; x -= z; x ^= (z >> 12);
    y -= z; y -= x; y ^= (x << 18);
    z -= x; z -= y; z ^= (y >> 22);
#else
    x -= y; x -= z; x ^= (z >> 13);
    y -= z; y -= x; y ^= (x << 8);
    z -= x; z -= y; z ^= (y >> 13);
    x -= y; x -= z; x ^= (z >> 12);
    y -= z; y -= x; y ^= (x << 16);
    z -= x; z -= y; z ^= (y >> 5);
    x -= y; x -= z; x ^= (z >> 3);
    y -= z; y -= x; y ^= (x << 10);
    z -= x; z -= y; z ^= (y >> 15);
#endif
    return z;
}
// Enlarge list's open-addressed referrer array and rehash every live
// referrer into the new storage. Capacity grows 0 -> 1 -> 2 -> 2n-1
// (odd sizes thereafter). Resets num_refs/max_hash_displacement, which
// append_referrer_no_lock() rebuilds during reinsertion.
static void grow_refs(weak_referrer_array_t *list)
{
    const unsigned prior_capacity = list->num_allocated;
    unsigned pending = list->num_refs;          // live entries left to move
    weak_referrer_t *prior_refs = list->refs;

    unsigned next_capacity;
    switch (prior_capacity) {
        case 0:  next_capacity = 1; break;
        case 1:  next_capacity = 2; break;
        default: next_capacity = prior_capacity + prior_capacity - 1; break;
    }

    list->refs = aux_calloc(next_capacity, sizeof(weak_referrer_t));
    list->num_allocated = next_capacity;
    list->num_refs = 0;
    list->max_hash_displacement = 0;

    // Reinsert surviving referrers; stop as soon as all have been moved.
    unsigned slot;
    for (slot = 0; slot < prior_capacity && pending > 0; slot++) {
        if (prior_refs[slot].referrer != NULL) {
            append_referrer_no_lock(list, prior_refs[slot].referrer, prior_refs[slot].block);
            pending--;
        }
    }
    if (prior_refs)
        aux_free(prior_refs);
}
// Insert (new_referrer, new_block) into list's open-addressed array using
// linear probing. Grows the array first when the load factor would reach
// 2/3, and records the worst probe length so removals know when to stop.
static void append_referrer_no_lock(weak_referrer_array_t *list, void **new_referrer, auto_weak_callback_block_t *new_block)
{
    // Keep occupancy below 2/3 before probing (also handles num_allocated == 0).
    if (list->num_refs >= list->num_allocated * 2 / 3) {
        grow_refs(list);
    }

    // Linear probe from the home bucket to the first empty slot.
    unsigned slot = hash(new_referrer) % list->num_allocated;
    unsigned displacement = 0;
    while (list->refs[slot].referrer != NULL) {
        slot++;
        displacement++;
        if (slot == list->num_allocated)
            slot = 0;
    }

    // Remember the longest displacement ever needed; remove_referrer_no_lock()
    // uses it as a search bound.
    if (list->max_hash_displacement < displacement) {
        list->max_hash_displacement = displacement;
    }

    list->refs[slot].referrer = new_referrer;
    list->refs[slot].block = new_block;
    list->num_refs++;
}
// Remove old_referrer from list's open-addressed array. The probe is
// bounded both by a full wrap-around and by the longest displacement any
// insertion ever required; exceeding either means the referrer was never
// registered, which is logged and ignored.
static void remove_referrer_no_lock(weak_referrer_array_t *list, void **old_referrer)
{
    unsigned slot = hash(old_referrer) % list->num_allocated;
    const unsigned first_slot = slot;
    unsigned displacement = 0;

    while (list->refs[slot].referrer != old_referrer) {
        slot++;
        displacement++;
        if (slot == list->num_allocated)
            slot = 0;
        if (slot == first_slot || displacement > list->max_hash_displacement) {
            malloc_printf("%s: attempted to remove unregistered weak referrer %p\n", auto_prelude(), old_referrer);
            return;
        }
    }

    // NULL-out the slot; probes past it rely on max_hash_displacement.
    list->refs[slot].referrer = NULL;
    list->num_refs--;
}
// Copy *new_entry into the first empty slot along its linear-probe
// sequence in the zone's weak table. Logs (and drops the entry) if the
// table is missing or completely full; weak_grow_maybe_no_lock() is
// expected to keep the table from filling.
static void weak_entry_insert_no_lock(azone_t *azone, weak_entry_t *new_entry)
{
    weak_entry_t *table = azone->weak_refs_table;
    if (!table) { malloc_printf("no auto weak ref table!\n"); return; }

    unsigned table_size = azone->max_weak_refs;
    unsigned start = hash(new_entry->referent) % table_size;
    unsigned slot = start;
    do {
        if (table[slot].referent == NULL) {
            table[slot] = *new_entry;
            return;
        }
        if (++slot == table_size) slot = 0;
    } while (slot != start);

    malloc_printf("no room for new entry in auto weak ref table!\n");
}
// Empty out *entry (freeing its referrer array) and repair the linear-probe
// invariant: every entry displaced past the new hole is removed and
// reinserted so lookups that stop at an empty slot still find it.
//
// Fixes vs. previous version: the NULL-table check now happens BEFORE the
// `entry - table` pointer arithmetic (which was UB when table was NULL),
// and the rehash temporary no longer shadows the `entry` parameter.
static void weak_entry_remove_no_lock(azone_t *azone, weak_entry_t *entry)
{
    entry->referent = NULL;
    if (entry->referrers.refs) aux_free(entry->referrers.refs);
    entry->referrers.refs = NULL;
    entry->referrers.num_refs = 0;
    entry->referrers.num_allocated = 0;

    weak_entry_t *table = azone->weak_refs_table;
    if (!table) return;

    unsigned table_size = azone->max_weak_refs;
    unsigned hole = (unsigned)(entry - table);
    unsigned index = hole;
    do {
        index++; if (index == table_size) index = 0;
        // First empty slot ends the cluster: no later entry can have been
        // displaced across the hole, so the table is consistent again.
        if (!table[index].referent) return;
        // Pull the entry out and reinsert it at its correct probe position.
        weak_entry_t displaced = table[index];
        table[index].referent = NULL;
        weak_entry_insert_no_lock(azone, &displaced);
    } while (index != hole);
}
// Grow the zone's weak table when occupancy reaches 3/4, rehashing all
// live entries into the new (zeroed) storage. Sizes follow 15 -> 2n+1.
//
// Fix vs. previous version: only entries with a non-NULL referent are
// reinserted. The old loop also "inserted" every empty slot, which cost a
// full probe per dead slot merely to copy zeros over zeros.
static void weak_grow_maybe_no_lock(azone_t *azone)
{
    if (azone->num_weak_refs >= azone->max_weak_refs * 3 / 4) {
        unsigned old_max = azone->max_weak_refs;
        unsigned new_max = old_max ? old_max * 2 + 1 : 15;
        weak_entry_t *old_entries = azone->weak_refs_table;
        weak_entry_t *new_entries = aux_calloc(new_max, sizeof(weak_entry_t));
        azone->max_weak_refs = new_max;
        azone->weak_refs_table = new_entries;
        if (old_entries) {
            weak_entry_t *entry;
            weak_entry_t *end = old_entries + old_max;
            for (entry = old_entries; entry < end; entry++) {
                // Skip empty slots; only live referents need rehashing.
                if (entry->referent) {
                    weak_entry_insert_no_lock(azone, entry);
                }
            }
            aux_free(old_entries);
        }
    }
}
// Scan the whole weak table for the first registered referrer slot that
// lies inside [location, location + count) and return it, or NULL if no
// registered referrer falls in that range. Takes the weak-table lock.
void **auto_weak_find_first_referrer(auto_zone_t *zone, void **location, unsigned long count) {
    azone_t *azone = (azone_t *)zone;
    void **found = NULL;
    spin_lock(&azone->weak_refs_table_lock);
    unsigned long i;
    for (i = 0; i < azone->max_weak_refs && !found; ++i) {
        if (!azone->weak_refs_table[i].referent) continue;
        weak_referrer_t *refs = azone->weak_refs_table[i].referrers.refs;
        unsigned long j;
        for (j = 0; j < azone->weak_refs_table[i].referrers.num_allocated; ++j) {
            void **referrer = refs[j].referrer;
            if (referrer >= location && referrer < location + count) {
                found = referrer;
                break;
            }
        }
    }
    spin_unlock(&azone->weak_refs_table_lock);
    return found;
}
// Look up the weak-table entry for referent via linear probing.
// An empty slot terminates the probe chain (no entry); a full wrap-around
// without a match also returns NULL.
static weak_entry_t *weak_entry_for_referent(azone_t *azone, const void *referent)
{
    weak_entry_t *table = azone->weak_refs_table;
    if (!table) return NULL;

    unsigned table_size = azone->max_weak_refs;
    unsigned start = hash(referent) % table_size;
    unsigned slot = start;
    do {
        if (table[slot].referent == referent) return table + slot;
        if (table[slot].referent == NULL) return NULL;
        if (++slot == table_size) slot = 0;
    } while (slot != start);

    return NULL;    // table completely full and referent absent
}
// Clear every registered __weak reference to entry->referent, then retire
// the entry from the zone's weak table.
//   weak_refs_count - incremented once per referrer slot actually cleared.
//   head            - callback blocks of cleared referrers are pushed onto
//                     this singly-linked list (a block with a non-NULL next
//                     is already enqueued and is not pushed again).
// NOTE(review): *head appears to be terminated by the (void *)-1 sentinel
// set up in weak_clear_references() and consumed by weak_call_callbacks().
static void weak_clear_entry_no_lock(azone_t *azone, weak_entry_t *entry, uintptr_t *weak_refs_count, auto_weak_callback_block_t **head)
{
    // Walk the entire referrer array; empty slots have referrer == NULL.
    unsigned count = entry->referrers.num_allocated;
    unsigned index = 0;
    for (; index < count; ++index) {
        weak_referrer_t *ref = &entry->referrers.refs[index];
        if (ref->referrer) {
            if (azone->control.log & AUTO_LOG_WEAK) malloc_printf("%s: WEAK: clearing ref to %p at %p (value was %p)\n", auto_prelude(), entry->referent, ref->referrer, *ref->referrer);
            if (*ref->referrer != entry->referent) {
                // Sanity check failed: the registered location no longer holds
                // the referent, so clearing it would stomp an unrelated value.
                // Log diagnostics about the enclosing block, then skip it.
                malloc_printf("__weak value %p at location %p not equal to %p and so will not be cleared\n", *ref->referrer, ref->referrer, entry->referent);
                void **base = (void **)auto_zone_base_pointer((auto_zone_t*)azone, ref->referrer);
                if (base) {
                    auto_memory_type_t type = auto_zone_get_layout_type((auto_zone_t*)azone, base);
                    malloc_printf("...location is %s starting at %p with first slot value %p\n",
                                  (type & AUTO_OBJECT) ? "an object" : "a data block",
                                  base,
                                  *base);
                }
                continue;
            }
            // Zero the weak slot and account for it.
            *ref->referrer = NULL;
            ++*weak_refs_count;
            // Enqueue the callback block at most once (next != NULL means it
            // is already on some list).
            if (ref->block && ref->block->callback_function && !ref->block->next) {
                ref->block->next = *head;
                *head = ref->block;
            }
        }
    }
    weak_entry_remove_no_lock(azone, entry);
}
// For each garbage block, clear all __weak references to it and collect the
// associated callback blocks into a singly-linked list terminated by the
// (void *)-1 sentinel (consumed by weak_call_callbacks()).
//   weak_referents_count - number of garbage blocks that had weak referrers.
//   weak_refs_count      - total number of weak slots cleared.
// Returns NULL when the table holds no weak references at all; otherwise
// returns the list head (possibly just the sentinel).
//
// Fix vs. previous version: the loop index is now size_t, matching
// garbage_count. The old `unsigned` index truncated on LP64 if
// garbage_count exceeded UINT_MAX and drew a signedness-comparison warning.
auto_weak_callback_block_t *weak_clear_references(azone_t *azone, size_t garbage_count, vm_address_t *garbage,
                      uintptr_t *weak_referents_count, uintptr_t *weak_refs_count)
{
    size_t i;
    *weak_referents_count = *weak_refs_count = 0;
    auto_weak_callback_block_t *head = (void *)-1;   // sentinel terminator
    spin_lock(&azone->weak_refs_table_lock);
    if (azone->num_weak_refs == 0) {
        spin_unlock(&azone->weak_refs_table_lock);
        return NULL;
    }
    for (i = 0; i < garbage_count; i++) {
        weak_entry_t *entry = weak_entry_for_referent(azone, (void *)garbage[i]);
        if (entry) {
            weak_clear_entry_no_lock(azone, entry, weak_refs_count, &head);
            ++*weak_referents_count;
        }
    }
    spin_unlock(&azone->weak_refs_table_lock);
    return head;
}
// Drop referrer from referent's weak entry (if any); when the last
// referrer is removed, the entry itself is retired and the zone's weak
// reference count is decremented. Caller must hold the weak-table lock.
static void weak_unregister_no_lock(azone_t *azone, const void *referent, void **referrer)
{
    if (azone->control.log & AUTO_LOG_WEAK) malloc_printf("%s: WEAK: unregistering weak reference to %p at %p\n", auto_prelude(), referent, referrer);

    weak_entry_t *entry = weak_entry_for_referent(azone, referent);
    if (entry == NULL) return;

    remove_referrer_no_lock(&entry->referrers, referrer);
    if (entry->referrers.num_refs == 0) {
        weak_entry_remove_no_lock(azone, entry);
        azone->num_weak_refs--;
    }
}
// Register *referrer as a __weak reference to referent, replacing any
// previous registration of the same location, and store referent into the
// slot. A NULL referent just clears the slot (after unregistering).
// `block`, if non-NULL, is the callback invoked when the slot is zeroed.
void weak_register(azone_t *azone, const void *referent, void **referrer, auto_weak_callback_block_t *block)
{
    if (azone->control.log & AUTO_LOG_WEAK) malloc_printf("%s: WEAK: registering weak reference to %p at %p\n", auto_prelude(), referent, referrer);

    spin_lock(&azone->weak_refs_table_lock);

    // A location may be registered against at most one referent at a time.
    if (*referrer) weak_unregister_no_lock(azone, *referrer, referrer);

    if (referent) {
        weak_entry_t *entry = weak_entry_for_referent(azone, referent);
        if (entry != NULL) {
            append_referrer_no_lock(&entry->referrers, referrer, block);
        } else {
            // First weak reference to this referent: build and insert a
            // fresh entry (growing the table first if it is near capacity).
            weak_entry_t fresh;
            fresh.referent = referent;
            fresh.referrers.refs = NULL;
            fresh.referrers.num_refs = 0;
            fresh.referrers.num_allocated = 0;
            append_referrer_no_lock(&fresh.referrers, referrer, block);
            weak_grow_maybe_no_lock(azone);
            azone->num_weak_refs++;
            weak_entry_insert_no_lock(azone, &fresh);
        }
    }

    *referrer = (void *)referent;   // publish the (possibly NULL) referent
    spin_unlock(&azone->weak_refs_table_lock);
}
// Public entry point: remove the registration of the weak slot *referrer
// against referent. Simply takes the weak-table spin lock around
// weak_unregister_no_lock().
void weak_unregister(azone_t *azone, const void *referent, void **referrer)
{
    spin_lock(&azone->weak_refs_table_lock);
    weak_unregister_no_lock(azone, referent, referrer);
    spin_unlock(&azone->weak_refs_table_lock);
}
// Bulk-unregister every __weak slot in block[], as described by layout:
// each byte packs (skip_count << 4) | weak_count, and a zero byte ends the
// map. Non-NULL weak slots are removed from their referent's entry; an
// entry whose last referrer is removed is retired.
//
// Fix vs. previous version: the assignment-in-condition is now
// parenthesized with an explicit != 0 (the bare `while (byte = *layout++)`
// is the classic -Wparentheses "did you mean ==" hazard); the skip loop is
// replaced by equivalent arithmetic.
void weak_unregister_with_layout(azone_t *azone, void *block[], const unsigned char *layout) {
    size_t index = 0;
    unsigned char byte;
    spin_lock(&azone->weak_refs_table_lock);
    while ((byte = *layout++) != 0) {
        index += (byte >> 4);            // skip the non-weak slots
        unsigned weaks = (byte & 0x0F);
        while (weaks--) {
            void **slot = &block[index++];
            const void *referent = *slot;
            if (!referent) continue;     // empty weak slot: nothing registered
            weak_entry_t *entry = weak_entry_for_referent(azone, referent);
            if (!entry) continue;
            remove_referrer_no_lock(&entry->referrers, slot);
            if (entry->referrers.num_refs == 0) {
                weak_entry_remove_no_lock(azone, entry);
                azone->num_weak_refs--;
            }
        }
    }
    spin_unlock(&azone->weak_refs_table_lock);
}
// Invoke the callbacks collected by weak_clear_references(). The list is
// singly linked through ->next and terminated by the (void *)-1 sentinel;
// each block's next is cleared before its callback runs so the block can
// be enqueued again later. A NULL argument is a no-op.
void weak_call_callbacks(auto_weak_callback_block_t *block) {
    if (block == NULL) return;
    for (; block != (void *)-1; ) {
        auto_weak_callback_block_t *next = block->next;
        block->next = NULL;     // unlink before calling out
        if (block->callback_function) {
            (*block->callback_function)(block->arg1, block->arg2);
        }
        block = next;
    }
}
// Print weak-table occupancy and linear-probe chain statistics to stderr:
// used/total slots, average chain length, and the longest chain.
//
// Fixes vs. previous version:
//  - A completely full table (no empty slot) was misreported as
//    "weak table empty"; it is now reported as one table-sized chain.
//  - The scan for the first used slot did not wrap around, so a table whose
//    used slots all preceded its first empty slot was also misreported as
//    empty; the scan now wraps.
__private_extern__ void weak_print_stats(void)
{
    azone_t *azone = (azone_t *)auto_zone();
    if (!azone) {
        fprintf(stderr, "weak table empty (GC off)\n");
        return;
    }
    weak_entry_t *table = azone->weak_refs_table;
    if (!table) {
        fprintf(stderr, "weak table empty\n");
        return;
    }
    unsigned table_size = azone->max_weak_refs;
    unsigned chainlen = 0;      // total used slots (sum of chain lengths)
    unsigned chaincount = 0;    // number of distinct chains
    unsigned chain = 0;         // length of the chain currently being scanned
    unsigned chainmax = 0;      // longest chain seen
    unsigned gap, start, scanned, i;

    // Find an empty slot so chain counting can begin on a chain boundary.
    for (gap = 0; gap < table_size; gap++) {
        if (!table[gap].referent) break;
    }
    if (gap == table_size) {
        // No empty slot: the whole table is one wrap-around chain.
        fprintf(stderr, "weak table %u/%u used, %.1g avg / %u max chain\n",
                table_size, table_size, (double)table_size, table_size);
        return;
    }

    // Advance (wrapping) to the first used slot after that gap.
    start = gap;
    for (scanned = 0; scanned < table_size; scanned++) {
        start++; if (start == table_size) start = 0;
        if (table[start].referent) break;
    }
    if (scanned == table_size) {
        fprintf(stderr, "weak table empty\n");
        return;
    }

    // One full pass from start: accumulate runs of used slots as chains.
    // The slot just before start is empty, so the last chain is flushed
    // before the loop terminates.
    i = start;
    do {
        if (table[i].referent) chain++;
        else if (chain) {
            if (chain > chainmax) chainmax = chain;
            chainlen += chain;
            chaincount++;
            chain = 0;
        }
        i++; if (i == table_size) i = 0;
    } while (i != start);

    fprintf(stderr, "weak table %u/%u used, %.1g avg / %u max chain\n",
            chainlen, azone->max_weak_refs,
            chaincount ? chainlen/(double)chaincount : 0.0, chainmax);
}