#include "AutoAdmin.h"
#include "AutoBitmap.h"
#include "AutoBlockIterator.h"
#include "AutoCollector.h"
#include "AutoConfiguration.h"
#include "AutoDefs.h"
#include "AutoEnvironment.h"
#include "AutoHashTable.h"
#include "AutoLarge.h"
#include "AutoList.h"
#include "AutoListTypes.h"
#include "AutoLock.h"
#include "AutoMonitor.h"
#include "AutoRange.h"
#include "AutoRegion.h"
#include "AutoStatistics.h"
#include "AutoSubzone.h"
#include "AutoMemoryScanner.h"
#include "AutoThread.h"
#include "AutoWriteBarrierIterator.h"
#include "AutoZone.h"
#include "auto_weak.h"
#include "auto_trace.h"
namespace Auto {
#if defined(DEBUG)
#warning DEBUG is set
#endif

// Process-wide flag: has setup_shared() run yet?
bool Zone::_is_auto_initialized = false;
// Most recently constructed zone (recorded at the end of Zone::Zone()).
Zone *Zone::_last_created = NULL;
// One-time shared initialization: marks initialization done, loads the
// environment configuration, and captures the default malloc zone for
// auxiliary allocations (only when no collected zone exists yet).
// NOTE(review): the flag is set before initialization completes; presumably
// the first Zone is constructed single-threaded — confirm.
void Zone::setup_shared() {
    _is_auto_initialized = true;
    Environment::_agc_env.initialize();
    if (!aux_zone && !Zone::zone()) {
        aux_zone = malloc_default_zone();
    }
}
// Construct a collected zone. The per-zone bitmaps are carved out of the
// memory immediately following the Zone object itself (admin_offset() /
// bytes_needed() define the layout), the thread-registration TSD keys are
// created, every lock and collector flag is reset, and the first region is
// allocated.
Zone::Zone()
    : _enlivening_queue(_stats), _garbage_list(_stats)
{
    ASSERTION(page_size == vm_page_size);
    if (!_is_auto_initialized) setup_shared();
    // bitmaps live directly after this object
    void *next = displace(this, admin_offset());
    pthread_key_create(&_registered_thread_key, Thread::destroy_registered_thread);
    _registered_threads = NULL;
    _registered_threads_lock = 0;
    pthread_key_create(&_thread_finalizing_key, NULL);
    // one bit per possible subzone, one bit per possible large quantum
    _in_subzone.initialize(subzone_quantum_max, next);
    next = displace(next, Bitmap::bytes_needed(subzone_quantum_max));
    _in_large.initialize(allocate_quantum_large_max, next);
    next = displace(next, Bitmap::bytes_needed(allocate_quantum_large_max));
#if UseArena
    // arena builds: reserve one aligned arena up front and track large
    // quanta with an allocation bitmap
    _large_bits.initialize(allocate_quantum_large_max, next);
    _large_bits_lock = 0;
    next = displace(next, Bitmap::bytes_needed(allocate_quantum_large_max));
    _arena = allocate_memory(1ul << arena_size_log2, 1ul << arena_size_log2);
    if (!_arena) {
        malloc_printf("can't allocate arena for GC\n");
        abort();
    }
    _large_start = NULL;
    _coverage.set_range(_arena, 1ul << arena_size_log2);
#else
    // start coverage as an inverted/empty range; expand_range() fixes it up
    _coverage.set_range((void *)~0, (void *)0);
#endif
    _large_list = NULL;
    _large_lock = 0;
    _roots_lock = 0;
    _zombies_lock = 0;
    _region_list = NULL;
    _region_lock = 0;
    _retains_lock = 0;
    _coverage_lock = 0;
    _is_partial = false;
    _repair_write_barrier = false;
    _some_pending = false;
    _use_pending = true;
    _needs_enlivening = false;
    _enlivening_lock = 0;
    _state = idle;
    _stats.reset();
    // account the zone header + bitmaps as administrative overhead
    usword_t data_size = bytes_needed();
    _stats.add_admin(data_size);
    _bytes_allocated = 0;
    allocate_region();
    _monitor = Monitor::monitor();
    _deallocate_large = &Zone::deallocate_large_normal;
    _last_created = this;
}
// Tear down the zone: release every large block, then every region.
// The original region loop advanced the cursor twice per iteration (once in
// the body via `region = next`, once in the for-increment via
// `region = region->next()`), which leaked every other region and finally
// dereferenced NULL in the increment after the last delete. Advance only
// inside the body, mirroring the large-list loop.
Zone::~Zone() {
    for (Large *large = _large_list; large; ) {
        Large *next = large->next();   // grab the link before freeing the node
        large->deallocate(this);
        large = next;
    }
    for (Region *region = _region_list; region != NULL; ) {
        Region *next = region->next(); // grab the link before freeing the node
        delete region;
        region = next;
    }
    _region_list = NULL;
}
#if UseArena
// First-fit search of the large-quantum bitmap for a run big enough to hold
// `size` bytes (rounded up to allocate_quantum_large). Returns the arena
// address of the run, or NULL when no run is free.
void *Zone::arena_allocate_large(usword_t size) {
    usword_t seeksize = (size + allocate_quantum_large - 1) & ~(allocate_quantum_large-1);
    usword_t nbits = seeksize >> allocate_quantum_large_log2;
    usword_t start = 0;
    // NOTE(review): only half the arena's large quanta are searched (the -1
    // in the shift) — presumably the rest of the arena is reserved for
    // subzone regions; confirm against the arena layout.
    usword_t end = 1ul << (arena_size_log2 - allocate_quantum_large_log2 - 1);
    if (nbits > (end - start)) {
        return NULL;
    }
    end -= nbits;   // last start index that still leaves room for nbits
    SpinLock lock(&_large_bits_lock);
    while (start <= end) {
        if (_large_bits.bits_are_clear(start, nbits)) {
            _large_bits.set_bits(start, nbits);
            return displace(_large_start, start << allocate_quantum_large_log2);
        }
        start += 1;
    }
    return NULL;
}
// Hand out the base of the arena for the (single) region allocation and
// record where large allocations may begin, just past the rounded-up
// region, in _large_start. A second call fails: the arena holds one region.
void *Zone::arena_allocate_region(usword_t newsize) {
    if (_large_start) return NULL;   // region already carved out
    usword_t roundedsize = (newsize + subzone_quantum - 1) & ~(subzone_quantum-1);
    _large_start = displace(_arena, roundedsize);
    return _arena;
}
// Return a large allocation's quanta to the arena bitmap. `address` must be
// a value previously returned by arena_allocate_large.
void Zone::arena_deallocate(void *address, size_t size) {
    usword_t seeksize = (size + allocate_quantum_large - 1) & ~(allocate_quantum_large-1);
    usword_t nbits = seeksize >> allocate_quantum_large_log2;
    usword_t start = ((char *)address - (char *)_large_start) >> allocate_quantum_large_log2;
    SpinLock lock(&_large_bits_lock);
    _large_bits.clear_bits(start, nbits);
}
#else
// Non-arena build: large blocks come straight from the VM, aligned to the
// large quantum and tagged for the malloc-large region.
void *Zone::arena_allocate_large(usword_t size) {
    return allocate_memory(size, allocate_quantum_large, VM_MEMORY_MALLOC_LARGE);
}
// Non-arena build: return the block's pages to the VM.
void Zone::arena_deallocate(void *address, size_t size) {
    deallocate_memory(address, size);
}
#endif
// Allocate a new Region, splice it onto the tail of the region list, and
// grow the coverage range to include it. Returns NULL when a region cannot
// be created. The scan stack is lazily bound to the first region's scan
// space.
Region *Zone::allocate_region() {
    Region *region = Region::new_region(this);
    if (region) {
        SpinLock lock(&_region_lock);
        {
            // coverage is consulted by other paths; update under its own lock
            SpinLock lock(&_coverage_lock);
            _coverage.expand_range(*region);
        }
        if (_region_list == NULL) {
            _region_list = region;
        } else {
            // walk to the tail: regions stay in creation order
            Region *last_region = _region_list, *next_region = last_region->next();
            while (next_region != NULL) {
                last_region = next_region;
                next_region = next_region->next();
            }
            last_region->set_next(region);
        }
        if (!_scan_stack.is_allocated()) {
            _scan_stack.set_range(region->scan_space());
        }
    }
    return region;
}
// Allocate a large block, link it onto the doubly-linked large list, extend
// coverage, and update statistics. Returns the block's address or NULL.
// Note: the `clear` parameter is unused on this path; arena builds always
// bzero because arena memory is recycled.
void *Zone::allocate_large(const size_t size, const unsigned layout, bool clear, bool refcount_is_one) {
    Large *large = Large::allocate(this, size, layout, refcount_is_one);
    if (large) {
        void *address = large->address();
        // deliberately shadows the parameter: this is the quantum-rounded size
        usword_t size = large->size();
#if UseArena
        bzero(address, size);
#endif
        SpinLock lock(&_large_lock);
        {
            // if a collection's enlivening window is open, queue the new
            // block so the collector cannot miss it
            ConditionBarrier barrier(&_needs_enlivening, &_enlivening_lock);
            if (barrier) enlivening_queue().add(address);
            large->set_next(_large_list);
            large->set_prev((Large*)0);
            if (_large_list) _large_list->set_prev(large);
            _large_list = large;
        }
        {
            SpinLock lock(&_coverage_lock);
            Range large_range(address, size);
            _coverage.expand_range(large_range);
        }
        _in_large.set_bit(Large::quantum_index(address));
        _stats.add_count(1);
        _stats.add_size(size);
        _stats.add_dirty(size);
        _stats.add_allocated(size);
        add_allocated_bytes(size);
        return address;
    }
    return NULL;
}
// Immediately free a large block: clear its quantum bit, roll back the
// statistics, unlink it from the large list, and release its memory.
// NOTE(review): the large list is mutated without taking _large_lock —
// presumably callers hold it or otherwise exclude concurrency; confirm.
void Zone::deallocate_large_normal(void *block) {
    _in_large.clear_bit(Large::quantum_index(block));
    Large *large = Large::large(block);
    usword_t size = large->size();
    _stats.add_count(-1);
    _stats.add_size(-size);
    _stats.add_allocated(-size);
    _stats.add_dirty(-size);
    // unlink from the doubly-linked list
    Large *prev = large->prev();
    Large *next = large->next();
    if (prev) prev->set_next(next);
    else _large_list = next;
    if (next) next->set_prev(prev);
    large->deallocate(this);
}
// While a collection is in progress, larges are not freed eagerly: the
// block is only flagged as freed (actual reclamation happens elsewhere).
void Zone::deallocate_large_collecting(void *block) {
    Large *large = Large::large(block);
    ASSERTION(!large->is_freed());
    large->set_freed();
}
// Allocate from the subzone (small/medium) path: try every existing region
// in order, then grow by one region and retry once. When no region can be
// created the will_grow callback is notified of arena exhaustion and NULL
// is returned.
void *Zone::allocate_small_medium(const size_t size, const unsigned layout, bool clear, bool refcount_is_one) {
    Region *region = _region_list;
    for (; region != NULL; region = region->next()) {
        void *block = region->allocate(size, layout, clear, refcount_is_one);
        if (block) return block;
    }
    region = allocate_region();
    if (!region) {
        control.will_grow((auto_zone_t *)this, AUTO_HEAP_ARENA_EXHAUSTED);
        return NULL;
    }
    return region->allocate(size, layout, clear, refcount_is_one);
}
void Zone::deallocate_small_medium(void *block) {
Subzone *subzone = Subzone::subzone(block);
Admin *admin = subzone->admin();
Region *region = admin->region();
region->deallocate(subzone, block);
}
// Allocate a GC block. Zero-byte requests are bumped to one byte; requests
// below the large threshold go to the subzone allocator, everything else
// to the large allocator. Returns NULL on failure.
void *Zone::block_allocate(const size_t size, const unsigned layout, bool clear, bool refcount_is_one) {
    usword_t needed = size ? size : 1;
    if (needed >= allocate_quantum_large)
        return allocate_large(needed, layout, clear, refcount_is_one);
    return allocate_small_medium(needed, layout, clear, refcount_is_one);
}
// Explicit free requested by the mutator. Subzone blocks are not released
// immediately: the refcount is dropped and the layout is forced to
// unscanned — presumably the collector then reclaims the block; confirm.
// Large blocks go through the currently installed deallocate_large path.
void Zone::block_deallocate(void *block) {
    erase_associations(block);
    if (in_subzone_memory(block)) {
        Subzone *subzone = Subzone::subzone(block);
        SpinLock adminLock(subzone->admin()->lock());
        dec_refcount_small_medium(subzone, block);
        subzone->set_layout(block, AUTO_MEMORY_UNSCANNED);
    } else if (in_large_memory(block)) {
        deallocate_large(block);
    } else {
        error("Deallocating a non-block", block);
    }
}
// Internal free path: drop the block's associative-reference entry (if
// any), then release its memory directly.
// NOTE(review): _associations is read and erased without taking
// _associations_lock — presumably callers already exclude mutators; confirm.
void Zone::block_deallocate_internal(void *block) {
    if (_associations.size() != 0) {
        PtrAssocHashMap::iterator iter = _associations.find(block);
        if (iter != _associations.end()) _associations.erase(iter);
    }
    if (in_subzone_memory(block)) {
        deallocate_small_medium(block);
    } else if (in_large_memory(block)) {
        deallocate_large(block);
    } else {
        error("Deallocating a non-block", block);
    }
}
// Resolve an arbitrary (possibly interior) pointer to the start of the
// large block containing it, or NULL. Finds the nearest large-start bit at
// or before the pointer's quantum, then range-checks the candidate.
void *Zone::block_start_large(void *address) {
    if (_coverage.in_range(address)) {
        SpinLock lock(&_large_lock);
        usword_t q = Large::quantum_index(address);
        if (!_in_large.bit(q)) {
            // interior pointer: back up to the preceding large start, if any
            q = _in_large.previous_set(q);
            if (q == not_found) return NULL;
        }
#if UseArena
        Large *large = Large::quantum_large(q, _arena);
#else
        Large *large = Large::quantum_large(q, (void *)0);
#endif
        // the preceding large may end before `address`
        if (!large->range().in_range(address)) return NULL;
        return large->address();
    }
    return NULL;
}
// Resolve a pointer to its block start: subzone pointers are answered by
// the subzone, everything else is tried against the large map.
void *Zone::block_start(void *address) {
    return in_subzone_memory(address)
        ? Subzone::subzone(address)->block_start(address)
        : block_start_large(address);
}
// Size of a block, dispatched on which heap partition holds it; 0 when the
// pointer is in neither partition.
usword_t Zone::block_size(void *block) {
    if (in_subzone_memory(block)) return Subzone::subzone(block)->size(block);
    if (in_large_memory(block)) return Large::size(block);
    return 0;
}
// Layout of a block, dispatched on its heap partition; AUTO_TYPE_UNKNOWN
// when the pointer is not a GC block.
int Zone::block_layout(void *block) {
    if (in_subzone_memory(block)) return Subzone::subzone(block)->layout(block);
    if (in_large_memory(block)) return Large::layout(block);
    return AUTO_TYPE_UNKNOWN;
}
void Zone::block_set_layout(void *block, int layout) {
if (in_subzone_memory(block)) {
Subzone *subzone = Subzone::subzone(block);
SpinLock lock(subzone->admin()->lock());
subzone->set_layout(block, layout);
} else if (in_large_memory(block)) {
Large::set_layout(block, layout);
}
}
// Read a subzone block's reference count. The subzone stores only small
// counts; the value 2 acts as a sentinel meaning "the real count lives in
// the _retains side table".
int Zone::get_refcount_small_medium(Subzone *subzone, void *block) {
    int refcount = subzone->refcount(block);
    if (refcount == 2) {
        SpinLock lock(&_retains_lock);
        PtrIntHashMap::iterator retain_iter = _retains.find(block);
        if (retain_iter != _retains.end() && retain_iter->first == block) {
            refcount = retain_iter->second;
        }
        // no table entry: the count really is 2
    }
    return refcount;
}
// Increment a subzone block's reference count and return the new value.
// Counts below the sentinel (2) are kept in the subzone itself; once the
// in-place count reaches 2, further increments go to the _retains side
// table (first overflow entry starts at 3).
int Zone::inc_refcount_small_medium(Subzone *subzone, void *block) {
    usword_t q = subzone->quantum_index(block);
    int refcount = subzone->refcount(q);
    if (refcount == 2) {
        SpinLock lock(&_retains_lock);
        PtrIntHashMap::iterator retain_iter = _retains.find(block);
        if (retain_iter != _retains.end() && retain_iter->first == block) {
            refcount = ++retain_iter->second;
        } else {
            // 2 -> 3 crosses into the side table
            refcount = (_retains[block] = 3);
        }
    } else {
        subzone->incr_refcount(q);
    }
    return refcount;
}
// Decrement a subzone block's reference count and return the new value.
// Side-table counts (in-place sentinel == 2) are decremented in the table;
// when the table value falls back to 2 the entry is erased and the
// in-place sentinel again holds the truth. Underflow reports through
// auto_refcount_underflow_error and returns -1.
int Zone::dec_refcount_small_medium(Subzone *subzone, void *block) {
    usword_t q = subzone->quantum_index(block);
    int refcount = subzone->refcount(q);
    if (refcount == 2) {
        SpinLock lock(&_retains_lock);
        PtrIntHashMap::iterator retain_iter = _retains.find(block);
        if (retain_iter != _retains.end() && retain_iter->first == block) {
            if (--retain_iter->second == 2) {
                // back at the sentinel: the subzone's own count is accurate again
                _retains.erase(retain_iter);
                return 2;
            } else {
                return retain_iter->second;
            }
        } else {
            // no table entry: the count was exactly 2; decrement in place
            subzone->decr_refcount(q);
            return 1;
        }
    } else if (refcount == 1) {
        subzone->decr_refcount(q);
        return 0;
    }
    malloc_printf("reference count underflow for %p, break on auto_refcount_underflow_error to debug.\n", block);
    auto_refcount_underflow_error(block);
    return -1;
}
// Current reference count of a block; 0 when the pointer is not a GC block.
int Zone::block_refcount(void *block) {
    if (in_subzone_memory(block))
        return get_refcount_small_medium(Subzone::subzone(block), block);
    if (in_large_memory(block)) {
        SpinLock guard(&_large_lock);
        return Large::refcount(block);
    }
    return 0;
}
#if 0
// Disabled manual stress test for the refcount sentinel/side-table scheme:
// bounces a block's refcount up and down several times, maturing the block
// between rounds, printing the count after every step.
void Zone::testRefcounting(void *block) {
    for (int j = 0; j < 7; ++j) {
        printf("\nloop start refcount is %d for %p\n", block_refcount(block), block);
        for (int i = 0; i < 5; ++i) {
            block_increment_refcount(block);
            printf("after increment, it now has refcount %d\n", block_refcount(block));
        }
        for (int i = 0; i < 5; ++i) {
            block_decrement_refcount(block);
            printf("after decrement, it now has refcount %d\n", block_refcount(block));
        }
        for (int i = 0; i < 5; ++i) {
            block_increment_refcount(block);
            printf("after increment, it now has refcount %d\n", block_refcount(block));
        }
        for (int i = 0; i < 5; ++i) {
            block_decrement_refcount(block);
            printf("after decrement, it now has refcount %d\n", block_refcount(block));
        }
        printf("maturing block...\n");
        Subzone::subzone(block)->mature(block);
    }
}
#endif
// Increment a block's reference count and return the new value (0 when the
// pointer is not a GC block). A 0 -> 1 transition makes the block a root;
// while the collector's enlivening window is open the block is also queued
// so the transition cannot be missed.
int Zone::block_increment_refcount(void *block) {
    int refcount = 0;
    if (in_subzone_memory(block)) {
        Subzone *subzone = Subzone::subzone(block);
        SpinLock lock(subzone->admin()->lock());
        refcount = inc_refcount_small_medium(subzone, block);
        if (refcount == 1) {
            ConditionBarrier barrier(&_needs_enlivening, &_enlivening_lock);
            if (barrier && !block_is_marked(block)) _enlivening_queue.add(block);
        }
    } else if (in_large_memory(block)) {
        SpinLock lock(&_large_lock);
        refcount = Large::refcount(block) + 1;
        Large::set_refcount(block, refcount);
        if (refcount == 1) {
            ConditionBarrier barrier(&_needs_enlivening, &_enlivening_lock);
            if (barrier && !block_is_marked(block)) _enlivening_queue.add(block);
        }
    }
    return refcount;
}
// Decrement a block's reference count and return the new value (0 when the
// pointer is not a GC block). Underflow on a large block reports through
// auto_refcount_underflow_error and leaves the count unchanged.
int Zone::block_decrement_refcount(void *block) {
    if (in_subzone_memory(block)) {
        Subzone *subzone = Subzone::subzone(block);
        SpinLock lock(subzone->admin()->lock());
        return dec_refcount_small_medium(subzone, block);
    } else if (in_large_memory(block)) {
        SpinLock lock(&_large_lock);
        int refcount = Large::refcount(block);
        if (refcount <= 0) {
            malloc_printf("reference count underflow for %p, break on auto_refcount_underflow_error to debug\n", block);
            auto_refcount_underflow_error(block);
        }
        else {
            refcount = refcount - 1;
            Large::set_refcount(block, refcount);
        }
        return refcount;
    }
    return 0;
}
void Zone::block_refcount_and_layout(void *block, int *refcount, int *layout) {
if (in_subzone_memory(block)) {
Subzone *subzone = Subzone::subzone(block);
SpinLock lock(subzone->admin()->lock());
*refcount = get_refcount_small_medium(subzone, block);
*layout = subzone->layout(block);
} else if (in_large_memory(block)) {
SpinLock lock(&_large_lock);
Large *large = Large::large(block);
*refcount = large->refcount();
*layout = large->layout();
}
}
// Queue a block for scanning if it needs it; returns true when the block
// was newly pended. Partial (generational) collections only pend new
// blocks. Unscanned layouts are marked but their contents are not queued.
// Depending on _use_pending, work goes into per-quantum pending bits or
// onto the scan stack.
bool Zone::set_pending(void *block) {
    if (!block) return false;
    if (in_subzone_memory(block)) {
        Subzone *subzone = Subzone::subzone(block);
        unsigned char layout;
        if (_is_partial) {
            if (!subzone->should_pend_new(block, layout)) return false;
        } else {
            if (!subzone->should_pend(block, layout)) return false;
        }
        // during association scanning, a reachable block makes its
        // associated values reachable too
        if (_scanning_associations) pend_associations(block);
        if (layout & AUTO_UNSCANNED) return false;
        if (_use_pending) {
            subzone->set_pending(block);
            set_some_pending();
        } else {
            scan_stack_push_block(block);
        }
        return true;
    } else if (in_large_memory(block)) {
        if (!Large::is_start(block)) return false;
        Large *large = Large::large(block);
        if (_is_partial && !large->is_new()) return false;
        if (large->test_set_mark()) return false;   // already marked
        if (_scanning_associations) pend_associations(block);
        if (large->layout() & AUTO_UNSCANNED) return false;
        if (_use_pending) {
            large->set_pending();
            set_some_pending();
        } else {
            scan_stack_push_block(block);
        }
        return true;
    }
    return false;
}
// Mark a block and queue it for scanning regardless of its age (unlike
// set_pending, no new-block check is made). Already-marked blocks and
// non-start pointers are ignored; unscanned layouts are marked but not
// queued.
void Zone::repend(void *block) {
    if (in_subzone_memory(block)) {
        Subzone *subzone = Subzone::subzone(block);
        if (!subzone->is_start(block)) return;
        usword_t q = subzone->quantum_index(block);
        if (subzone->is_marked(q)) return;
        subzone->set_mark(q);
        if (subzone->layout(q) & AUTO_UNSCANNED) return;
        if (_use_pending) {
            subzone->set_pending(q);
            set_some_pending();
        } else {
            scan_stack_push_block(block);
        }
    } else if (in_large_memory(block)) {
        if (!Large::is_start(block)) return;
        Large *large = Large::large(block);
        if (large->is_marked()) return;
        large->set_mark();
        if (large->layout() & AUTO_UNSCANNED) return;
        if (_use_pending) {
            large->set_pending();
            set_some_pending();
        } else {
            scan_stack_push_block(block);
        }
    }
}
// Scan associative references: for every block already marked reachable,
// pend all values associated with it, then drain the pending work. While
// _scanning_associations is set, set_pending() also pends a newly-reached
// block's own associations, making reachability transitive.
void Zone::scan_associations(MemoryScanner &scanner) {
    SpinLock lock(&_associations_lock);
    _scanning_associations = true;
    for (PtrAssocHashMap::iterator i = _associations.begin(); i != _associations.end(); i++) {
        void *block = i->first;
        if (block_is_marked(block)) {
            PtrPtrHashMap &refs = i->second;
            for (PtrPtrHashMap::iterator j = refs.begin(); j != refs.end(); j++) {
                set_pending(j->second);
            }
        }
    }
    scanner.scan_pending_until_done();
    _scanning_associations = false;
}
// Store `value` at `address` (which must lie inside a GC block) and dirty
// the covering write-barrier card. While the collector's enlivening window
// is open, the stored value is also queued so the collector cannot miss
// it. Returns false — and performs no store — when address is not in GC
// memory.
bool Zone::set_write_barrier(void *address, void *value) {
    if (in_subzone_memory(address)) {
        Subzone *subzone = Subzone::subzone(address);
        UnconditionalBarrier condition(&_needs_enlivening, &_enlivening_lock);
        if (condition && !block_is_marked(value)) _enlivening_queue.add(value);
        *(void **)address = value;
        subzone->write_barrier().mark_card(address);
        return true;
    }
    else if (void *block = block_start_large(address)) {
        Large *large = Large::large(block);
        UnconditionalBarrier condition(&_needs_enlivening, &_enlivening_lock);
        if (condition && !block_is_marked(value)) _enlivening_queue.add(value);
        *(void **)address = value;
        large->write_barrier().mark_card(address);
        return true;
    }
    return false;
}
bool Zone::set_write_barrier_range(void *destination, const usword_t size) {
if (in_subzone_memory(destination)) {
Subzone *subzone = Subzone::subzone(destination);
subzone->write_barrier().mark_cards(destination, size);
return true;
} else if (void *block = block_start_large(destination)) {
Large *large = Large::large(block);
large->write_barrier().mark_cards(destination, size);
return true;
}
return false;
}
bool Zone::set_write_barrier(void *address) {
if (in_subzone_memory(address)) {
Subzone *subzone = Subzone::subzone(address);
subzone->write_barrier().mark_card(address);
return true;
}
else if (void *block = block_start_large(address)) {
Large *large = Large::large(block);
large->write_barrier().mark_card(address);
return true;
}
return false;
}
void Zone::write_barrier_scan_unmarked_content(void *block, const usword_t size, MemoryScanner &scanner) {
if (in_subzone_memory(block)) {
Subzone *subzone = Subzone::subzone(block);
WriteBarrier wb = subzone->write_barrier();
wb.scan_ranges(block, size, scanner);
} else if (in_large_memory(block)) {
Large *large = Large::large(block);
WriteBarrier wb = large->write_barrier();
wb.scan_ranges(block, size, scanner);
}
}
// Visitor: flags every card in a write barrier as "untouched" so later
// mutations can be detected.
struct mark_write_barriers_untouched_visitor {
    inline bool visit(Zone *zone, WriteBarrier &wb) {
        wb.mark_cards_untouched();
        return true;   // keep iterating
    }
};
// Flag every write barrier in the zone as untouched (see the visitor above
// for the per-barrier operation).
void Zone::mark_write_barriers_untouched() {
    mark_write_barriers_untouched_visitor visitor;
    visitWriteBarriers(this, visitor);
}
// Visitor: clears cards that were never touched since being flagged,
// shrinking the set of ranges the next collection must rescan.
struct clear_untouched_write_barriers_visitor {
    inline bool visit(Zone *zone, WriteBarrier &wb) {
        wb.clear_untouched_cards();
        return true;   // keep iterating
    }
};
// Clear untouched cards in every write barrier in the zone.
void Zone::clear_untouched_write_barriers() {
    clear_untouched_write_barriers_visitor visitor;
    visitWriteBarriers(this, visitor);
}
// Visitor: wipes a write barrier completely.
struct clear_all_write_barriers_visitor {
    inline bool visit(Zone *zone, WriteBarrier &wb) {
        wb.clear();
        return true;   // keep iterating
    }
};
// Wipe every write barrier in the zone.
void Zone::clear_all_write_barriers() {
    clear_all_write_barriers_visitor visitor;
    visitWriteBarriers(this, visitor);
}
// Visitor: clears the mark bit on every allocated block (retired
// alternative to the direct loops in reset_all_marks below).
struct reset_all_marks_visitor {
    inline bool visit(Zone *zone, Subzone *subzone, usword_t q, void *block) {
        subzone->clear_mark(q);
        return true;
    }
    inline bool visit(Zone *zone, Large *large, void *block) {
        large->clear_mark();
        return true;
    }
};
// Clear every mark bit in the zone. The active (#if 1) path uses direct
// per-region and per-large loops; the visitor-based path below is the
// retired generic alternative. The large list is walked under _large_lock;
// the region list is walked without a lock (matching the original).
void Zone::reset_all_marks() {
#if 1
    for (Region *region = _region_list; region != NULL; region = region->next()) {
        region->clear_all_marks();
    }
    SpinLock lock(&_large_lock);
    for (Large *large = _large_list; large != NULL; large = large->next()) {
        large->clear_mark();
    }
#else
    reset_all_marks_visitor visitor;
    BlockIterator<reset_all_marks_visitor> iterator(this, visitor);
    iterator.visit();
#endif
}
// Visitor: clears both the mark and pending bits on every allocated block
// (retired alternative to the direct loops below).
struct reset_all_marks_and_pending_visitor {
    inline bool visit(Zone *zone, Subzone *subzone, usword_t q) {
        subzone->clear_mark(q);
        subzone->clear_pending(q);
        return true;
    }
    inline bool visit(Zone *zone, Large *large) {
        large->clear_mark();
        large->clear_pending();
        return true;
    }
};
// Clear every mark AND pending bit in the zone — used to restart scanning
// from scratch (see the scan-stack-overflow path in collect()). Same
// locking pattern as reset_all_marks.
void Zone::reset_all_marks_and_pending() {
#if 1
    for (Region *region = _region_list; region != NULL; region = region->next()) {
        region->clear_all_marks();
        region->clear_all_pending();
    }
    SpinLock lock(&_large_lock);
    for (Large *large = _large_list; large != NULL; large = large->next()) {
        large->clear_mark();
        large->clear_pending();
    }
#else
    reset_all_marks_and_pending_visitor visitor;
    visitAllocatedBlocks(this, visitor);
#endif
}
// Visitor that accumulates heap statistics while walking all allocated
// blocks. Region and subzone admin costs are charged once per distinct
// region/subzone by remembering the last one seen (the walk visits blocks
// of a subzone consecutively).
struct statistics_visitor {
    Statistics &_stats;
    Region *_last_region;     // last region charged for admin overhead
    Subzone *_last_subzone;   // last subzone charged for admin overhead
    statistics_visitor(Statistics &stats)
        : _stats(stats)
        , _last_region(NULL)
        , _last_subzone(NULL)
    {}
    inline bool visit(Zone *zone, Subzone *subzone, usword_t q) {
        if (_last_region != subzone->admin()->region()) {
            _last_region = subzone->admin()->region();
            _stats.add_admin(Region::bytes_needed());
        }
        if (_last_subzone != subzone) {
            _last_subzone = subzone;
            _stats.add_admin(subzone_write_barrier_max);
            _stats.add_allocated(subzone->allocation_size());
            _stats.add_dirty(subzone->allocation_size());
        }
        _stats.add_count(1);
        _stats.add_size(subzone->size(q));
        return true;
    }
    inline bool visit(Zone *zone, Large *large) {
        // a large's admin cost is the slack between its VM size and its size
        _stats.add_admin(large->vm_size() - large->size());
        _stats.add_count(1);
        _stats.add_size(large->size());
        return true;
    }
};
// Accumulate heap statistics into `stats` by walking all allocated blocks.
void Zone::statistics(Statistics &stats) {
    statistics_visitor visitor(stats);
    visitAllocatedBlocks(this, visitor);
}
// Exclude the collector: wait for any in-flight collection to clear its
// status, then suspend all registered mutator threads. Leaves
// collection_mutex held until unblock_collector() is called.
void Zone::block_collector() {
    pthread_mutex_lock(&collection_mutex);
    while (collection_status_state) {
        pthread_cond_wait(&collection_status, &collection_mutex);
    }
    suspend_all_registered_threads();
}
// Undo block_collector(): resume the mutator threads and release the
// collection mutex.
void Zone::unblock_collector() {
    resume_all_registered_threads();
    pthread_mutex_unlock(&collection_mutex);
}
// Run one collection cycle (partial/generational or full). On exit the
// enlivening lock — acquired during scanning — has been released and
// *enliveningBegin holds the timestamp at which scanning ended.
void Zone::collect(bool is_partial, void *current_stack_bottom, auto_date_t *enliveningBegin) {
    auto_trace_phase_begin((auto_zone_t*)this, is_partial, AUTO_TRACE_SCANNING_PHASE);
    // from here on, mutators must queue new/retained blocks for enlivening
    set_needs_enlivening();
    Collector collector(this, current_stack_bottom, is_partial);
    collector.collect(false);
    if (_scan_stack.is_overflow()) {
        // scan stack overflowed: throw away all mark/pending state and
        // rescan using the pending-bit mechanism instead of the stack
        _stats.increment_stack_overflow_count();
        reset_all_marks_and_pending();
        ASSERTION(_enlivening_lock != 0);   // still held from the first pass
        spin_unlock(&_enlivening_lock);
        collector.collect(true);
    }
    _scan_stack.reset();
    auto_trace_phase_end((auto_zone_t*)this, is_partial, AUTO_TRACE_SCANNING_PHASE,
                         collector.blocks_scanned(), collector.bytes_scanned());
    auto_weak_callback_block_t *callbacks = NULL;
    *enliveningBegin = collector.scan_end;
    _stats.increment_gc_count(is_partial);
    // rebuild the garbage list from the unmarked blocks
    _garbage_list.clear_count();
    scavenge_blocks();
    if (has_weak_references()) {
        // nil out weak references into the garbage; callbacks fire later,
        // after the enlivening lock is dropped
        auto_trace_phase_begin((auto_zone_t*)this, is_partial, AUTO_TRACE_WEAK_REFERENCE_PHASE);
        uintptr_t weak_referents, weak_references;
        callbacks = weak_clear_references(this, _garbage_list.count(), _garbage_list.buffer(), &weak_referents, &weak_references);
        auto_trace_phase_end((auto_zone_t*)this, is_partial, AUTO_TRACE_WEAK_REFERENCE_PHASE, weak_referents, weak_references * sizeof(void*));
    }
    if (!is_partial) {
        // full collection: arm card repair for subsequent partials
        mark_write_barriers_untouched();
        _repair_write_barrier = true;
    } else if (_repair_write_barrier) {
        // first partial after a full: retire cards that were never touched
        clear_untouched_write_barriers();
        _repair_write_barrier = false;
    }
    clear_needs_enlivening();
    spin_unlock(&_enlivening_lock);
    weak_call_callbacks(callbacks);
    if (Environment::_agc_env._print_stats) {
        malloc_printf("cnt=%d, sz=%d, max=%d, al=%d, admin=%d\n",
                      _stats.count(),
                      _stats.size(),
                      _stats.dirty_size(),
                      _stats.allocated(),
                      _stats.admin_size());
    }
}
// Visitor run after scanning: ages ("matures") blocks that survived as new,
// and gathers every unmarked — i.e. unreachable — block into the garbage
// list. Larges already flagged freed are skipped.
struct scavenge_blocks_visitor {
    PointerList& _list;   // receives addresses of garbage blocks
    scavenge_blocks_visitor(PointerList& list) : _list(list) {}
    inline bool visit(Zone *zone, Subzone *subzone, usword_t q) {
        if (subzone->is_new(q)) subzone->mature(q);
        if (!subzone->is_marked(q)) _list.add((vm_address_t)subzone->quantum_address(q));
        return true;
    }
    inline bool visit(Zone *zone, Large *large) {
        if (large->is_new()) large->mature();
        if (!large->is_marked() && !large->is_freed()) _list.add((vm_address_t)large->address());
        return true;
    }
};
// Walk all allocated blocks, maturing survivors and filling _garbage_list
// with the unmarked ones (see scavenge_blocks_visitor).
void Zone::scavenge_blocks() {
    scavenge_blocks_visitor visitor(_garbage_list);
    visitAllocatedBlocks(this, visitor);
}
// Register the calling thread for stack scanning. The Thread object is
// created on first call, linked into the registered-thread list, and
// cached in thread-specific data; the retain count supports nested
// register/unregister pairs.
void Zone::register_thread() {
    Thread *thread = (Thread *)pthread_getspecific(_registered_thread_key);
    if (thread == NULL) {
        pthread_t pthread = pthread_self();
        thread = new Thread(this, pthread, pthread_mach_thread_np(pthread));
        {
            SpinLock lock(&_registered_threads_lock);
            thread->set_next(_registered_threads);
            _registered_threads = thread;
        }
        pthread_setspecific(_registered_thread_key, thread);
    }
    thread->retain();
}
// Undo register_thread(): drop one retain; when the count hits zero the
// Thread is unlinked from the registered list, removed from TSD, and
// destroyed.
void Zone::unregister_thread() {
    Thread *thread = (Thread *)pthread_getspecific(_registered_thread_key);
    if (thread && thread->release() == 0) {
        pthread_setspecific(_registered_thread_key, NULL);
        {
            SpinLock lock(&_registered_threads_lock);
            thread->unlink(&_registered_threads);
        }
        delete thread;
    }
}
// Suspend every registered mutator thread. A thread that fails to suspend
// (presumably it has exited) is unlinked from the list.
void Zone::suspend_all_registered_threads() {
    SpinLock lock(&_registered_threads_lock);
    Thread *thread = _registered_threads;
    while (thread) {
        Thread *next = thread->next();   // unlink may disturb the links
        if (!thread->suspend()) thread->unlink(&_registered_threads);
        thread = next;
    }
}
// Resume every registered mutator thread. A thread that fails to resume is
// unlinked from the list (mirrors suspend_all_registered_threads).
void Zone::resume_all_registered_threads() {
    SpinLock lock(&_registered_threads_lock);
    Thread *thread = _registered_threads;
    while (thread) {
        Thread *next = thread->next();   // unlink may disturb the links
        if (!thread->resume()) thread->unlink(&_registered_threads);
        thread = next;
    }
}
struct print_all_blocks_visitor {
Region *_last_region; Subzone *_last_subzone; bool _is_large;
print_all_blocks_visitor() : _last_region(NULL), _is_large(false) {}
inline bool visit(Zone *zone, Subzone *subzone, usword_t q) {
if (_last_region != subzone->admin()->region()) {
_last_region = subzone->admin()->region();
malloc_printf("Region [%p..%p]\n", _last_region->address(), _last_region->end());
}
void *block = subzone->quantum_address(q);
if (subzone->is_start_lite(q)) {
zone->print_block(block);
} else {
FreeListNode *node = (FreeListNode *)block;
malloc_printf(" %p(%6d) ### free\n", block, node->size());
}
return true;
}
inline bool visit(Zone *zone, Large *large) {
if (!_is_large) {
malloc_printf("Large Blocks\n");
_is_large = true;
}
zone->print_block(large->address());
return true;
}
};
// Dump the entire heap (regions, blocks, free nodes, larges) to the malloc
// log. Holds _region_lock for the duration of the walk.
void Zone::print_all_blocks() {
    SpinLock lock(&_region_lock);
    print_all_blocks_visitor visitor;
    AllBlockIterator<print_all_blocks_visitor> iterator(this, visitor);
    iterator.visit();
}
// Convenience overload: print a block with no tag prefix.
void Zone::print_block(void *block) {
    print_block(block, "");
}
// Print a one-line description of the block containing `block` (interior
// pointers are resolved), prefixed with `tag`. The original overwrote
// `block` with block_start(block) before printing the "not a block"
// diagnostic — so it always reported NULL — and then emitted the same
// diagnostic a second time at the end of the function; both defects are
// fixed by keeping the caller's pointer and returning early.
void Zone::print_block(void *block, const char *tag) {
    void *start = block_start(block);
    if (!start) {
        malloc_printf("%s%p is not a block", tag, block);
        return;
    }
    block = start;
    if (in_subzone_memory(block)) {
        Subzone *subzone = Subzone::subzone(block);
        usword_t q = subzone->quantum_index(block);
        int rc = block_refcount(block);
        int layout = subzone->layout(q);
        bool is_unscanned = (layout & AUTO_UNSCANNED) != 0;
        bool is_object = (layout & AUTO_OBJECT) != 0;
        bool is_new = subzone->is_new(q);
        bool is_marked = subzone->is_marked(q);
        bool is_pending = false;
        const char *class_name = "";
        if (is_object) {
            // assumes an objc-style object with the class-name pointer at
            // isa + 8 — TODO confirm against the runtime's class layout
            void *isa = *(void **)block;
            if (isa) class_name = *(char **)displace(isa, 8);
        }
        malloc_printf("%s%p(%6d) %s %s %s %s %s rc(%d) q(%u) subzone(%p) %s\n",
                      tag, block, (unsigned)subzone->size(q),
                      is_unscanned ? "   " : "scn",
                      is_object ? "obj" : "mem",
                      is_new ? "new" : "   ",
                      is_marked ? "mark" : "    ",
                      is_pending ? "pend" : "    ",
                      rc,
                      q, subzone,
                      class_name);
    } else if (in_large_memory(block)) {
        Large *large = Large::large(block);
        int rc = block_refcount(block);
        int layout = large->layout();
        bool is_unscanned = (layout & AUTO_UNSCANNED) != 0;
        bool is_object = (layout & AUTO_OBJECT) != 0;
        bool is_new = large->is_new();
        bool is_marked = large->is_marked();
        bool is_pending = false;
        const char *class_name = "";
        if (is_object) {
            void *isa = *(void **)block;
            if (isa) class_name = *(char **)displace(isa, 8);
        }
        malloc_printf("%s%p(%6d) %s %s %s %s %s rc(%d) %s\n",
                      tag, block, (unsigned)large->size(),
                      is_unscanned ? "   " : "scn",
                      is_object ? "obj" : "mem",
                      is_new ? "new" : "   ",
                      is_marked ? "mark" : "    ",
                      is_pending ? "pend" : "    ",
                      rc,
                      class_name);
    } else {
        // block_start() returned a pointer outside both partitions
        malloc_printf("%s%p is not a block", tag, block);
    }
}
};