#include "config.h"
#include "MarkedBlock.h"
#include "JSCell.h"
#include "JSObject.h"
#include "ScopeChain.h"
namespace JSC {
MarkedBlock* MarkedBlock::create(Heap* heap, size_t cellSize)
{
    // Reserve one block-sized, block-aligned span of GC heap pages; alignment
    // equal to size allows a cell pointer to be masked down to its block.
    PageAllocationAligned pages = PageAllocationAligned::allocate(blockSize, blockSize, OSAllocator::JSGCHeapPages);
    if (!static_cast<bool>(pages))
        CRASH(); // Allocation failure is unrecoverable here.
    // Construct the block header in place at the start of the mapping.
    return new (pages.base()) MarkedBlock(pages, heap, cellSize);
}
// Returns the block's page mapping to the OS. The MarkedBlock header itself
// lives inside that mapping (see create()), so releasing the pages frees the
// header too; no explicit destructor call is made.
// NOTE(review): assumes any needed cell destructors have already run (e.g.
// via reset()/sweep()) — confirm at the call site.
void MarkedBlock::destroy(MarkedBlock* block)
{
block->m_allocation.deallocate();
}
// Constructs the block header over the page span it manages (invoked via
// placement new in create()). Blocks start flagged as outside new space;
// per-cell geometry is derived from the requested cell size.
MarkedBlock::MarkedBlock(const PageAllocationAligned& allocation, Heap* heap, size_t cellSize)
: m_inNewSpace(false)
, m_allocation(allocation)
, m_heap(heap)
{
initForCellSize(cellSize);
}
void MarkedBlock::initForCellSize(size_t cellSize)
{
    // Cells occupy a whole number of atoms: round the byte size up.
    size_t atomsNeeded = (cellSize + atomSize - 1) / atomSize;
    m_atomsPerCell = atomsNeeded;
    // One past the last atom index at which a full cell still fits.
    m_endAtom = atomsPerBlock - atomsNeeded + 1;
    setDestructorState(SomeFreeCellsStillHaveObjects);
}
// Runs the C++ destructor for a cell, specialized at compile time on what the
// block knows about its free cells:
// - FreeCellsDontHaveObjects: no free cell needs destruction; the whole call
//   folds away.
// - AllFreeCellsHaveObjects: destruct unconditionally, without reading vptr.
// - SomeFreeCellsStillHaveObjects: a zero vptr marks a cell whose destructor
//   already ran (see specializedSweep's setVPtr(0)); skip those.
// jsFinalObjectVPtr lets the common JSFinalObject case use a direct,
// non-virtual destructor call instead of the virtual ~JSCell path.
template<MarkedBlock::DestructorState specializedDestructorState>
void MarkedBlock::callDestructor(JSCell* cell, void* jsFinalObjectVPtr)
{
if (specializedDestructorState == FreeCellsDontHaveObjects)
return;
void* vptr = cell->vptr();
if (specializedDestructorState == AllFreeCellsHaveObjects || vptr) {
if (vptr == jsFinalObjectVPtr) {
// Fast path: qualified call avoids virtual dispatch for the dominant type.
JSFinalObject* object = reinterpret_cast<JSFinalObject*>(cell);
object->JSFinalObject::~JSFinalObject();
} else
cell->~JSCell();
}
}
template<MarkedBlock::DestructorState specializedDestructorState>
void MarkedBlock::specializedReset()
{
void* jsFinalObjectVPtr = m_heap->globalData()->jsFinalObjectVPtr;
for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell)
callDestructor<specializedDestructorState>(reinterpret_cast<JSCell*>(&atoms()[i]), jsFinalObjectVPtr);
}
void MarkedBlock::reset()
{
switch (destructorState()) {
case FreeCellsDontHaveObjects:
case SomeFreeCellsStillHaveObjects:
specializedReset<SomeFreeCellsStillHaveObjects>();
break;
default:
ASSERT(destructorState() == AllFreeCellsHaveObjects);
specializedReset<AllFreeCellsHaveObjects>();
break;
}
}
// Sweeps the block: runs the destructor of every unmarked (dead) cell and
// zeroes its vptr so later passes recognize it as already-destructed.
// The FreeCellsDontHaveObjects instantiation compiles to an empty function;
// in that state the block is, in effect, already swept, which is also why the
// state transition below sits inside the guard.
template<MarkedBlock::DestructorState specializedDestructorState>
void MarkedBlock::specializedSweep()
{
if (specializedDestructorState != FreeCellsDontHaveObjects) {
void* jsFinalObjectVPtr = m_heap->globalData()->jsFinalObjectVPtr;
for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) {
// Marked cells are live; leave them untouched.
if (m_marks.get(i))
continue;
JSCell* cell = reinterpret_cast<JSCell*>(&atoms()[i]);
callDestructor<specializedDestructorState>(cell, jsFinalObjectVPtr);
// Tag the dead cell as object-free so its destructor never runs twice.
cell->setVPtr(0);
}
setDestructorState(FreeCellsDontHaveObjects);
}
}
void MarkedBlock::sweep()
{
HEAP_DEBUG_BLOCK(this);
switch (destructorState()) {
case FreeCellsDontHaveObjects:
break;
case SomeFreeCellsStillHaveObjects:
specializedSweep<SomeFreeCellsStillHaveObjects>();
break;
default:
ASSERT(destructorState() == AllFreeCellsHaveObjects);
specializedSweep<AllFreeCellsHaveObjects>();
break;
}
}
// Builds a LIFO free list of the block's dead cells in one pass: each
// unmarked cell is destructed (per the specialization), overlaid with a
// FreeCell link, and pushed on the list. testAndSet simultaneously checks
// liveness and sets the mark bit, so a reclaimed cell is never handed out
// twice.
// NOTE(review): the final state is AllFreeCellsHaveObjects — presumably
// because list cells will be handed to the allocator and may hold objects by
// the time the block is next examined; canonicalizeBlock() asserts exactly
// this state when returning an unconsumed list.
template<MarkedBlock::DestructorState specializedDestructorState>
ALWAYS_INLINE MarkedBlock::FreeCell* MarkedBlock::produceFreeList()
{
void* jsFinalObjectVPtr = m_heap->globalData()->jsFinalObjectVPtr;
FreeCell* result = 0;
for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) {
if (!m_marks.testAndSet(i)) {
JSCell* cell = reinterpret_cast<JSCell*>(&atoms()[i]);
if (specializedDestructorState != FreeCellsDontHaveObjects)
callDestructor<specializedDestructorState>(cell, jsFinalObjectVPtr);
// Reuse the cell's storage as a free-list node; push onto the head.
FreeCell* freeCell = reinterpret_cast<FreeCell*>(cell);
freeCell->next = result;
result = freeCell;
}
}
setDestructorState(AllFreeCellsHaveObjects);
return result;
}
MarkedBlock::FreeCell* MarkedBlock::lazySweep()
{
    HEAP_DEBUG_BLOCK(this);
    // Sweep and build a free list in a single pass, using the produceFreeList
    // specialization that matches the block's current destructor state.
    DestructorState state = destructorState();
    if (state == FreeCellsDontHaveObjects)
        return produceFreeList<FreeCellsDontHaveObjects>();
    if (state == SomeFreeCellsStillHaveObjects)
        return produceFreeList<SomeFreeCellsStillHaveObjects>();
    ASSERT(state == AllFreeCellsHaveObjects);
    return produceFreeList<AllFreeCellsHaveObjects>();
}
MarkedBlock::FreeCell* MarkedBlock::blessNewBlockForFastPath()
{
    HEAP_DEBUG_BLOCK(this);
    // Thread every cell slot onto a LIFO free list, setting each slot's mark
    // bit as it goes; the returned head is the last (highest-address) cell.
    FreeCell* head = 0;
    for (size_t atom = firstAtom(); atom < m_endAtom; atom += m_atomsPerCell) {
        m_marks.set(atom);
        FreeCell* node = reinterpret_cast<FreeCell*>(&atoms()[atom]);
        node->next = head;
        head = node;
    }
    setDestructorState(AllFreeCellsHaveObjects);
    return head;
}
void MarkedBlock::blessNewBlockForSlowPath()
{
HEAP_DEBUG_BLOCK(this);
m_marks.clearAll();
for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell)
reinterpret_cast<FreeCell*>(&atoms()[i])->setNoObject();
setDestructorState(FreeCellsDontHaveObjects);
}
void MarkedBlock::canonicalizeBlock(FreeCell* firstFreeCell)
{
    HEAP_DEBUG_BLOCK(this);
    ASSERT(destructorState() == AllFreeCellsHaveObjects);
    // Return an unconsumed free list to the block: clear each listed cell's
    // mark bit and flag it as object-free. An empty list leaves the block's
    // state untouched.
    if (!firstFreeCell)
        return;
    FreeCell* cell = firstFreeCell;
    while (cell) {
        FreeCell* following = cell->next; // read before the cell is re-tagged
        m_marks.clear(atomNumber(cell));
        cell->setNoObject();
        cell = following;
    }
    setDestructorState(SomeFreeCellsStillHaveObjects);
}
}