Changeset 161557 in webkit


Timestamp: Jan 9, 2014 9:21:02 AM
Author: commit-queue@webkit.org
Message:

Unreviewed, rolling out r161540.
http://trac.webkit.org/changeset/161540
https://bugs.webkit.org/show_bug.cgi?id=126704

Caused assertion failures on multiple tests (Requested by ap
on #webkit).

Source/JavaScriptCore:

  • bytecode/CodeBlock.cpp:
    (JSC::CodeBlock::visitAggregate):
  • bytecode/CodeBlock.h:
    (JSC::CodeBlockSet::mark):
  • dfg/DFGOperations.cpp:
  • heap/CodeBlockSet.cpp:
    (JSC::CodeBlockSet::add):
    (JSC::CodeBlockSet::traceMarked):
  • heap/CodeBlockSet.h:
  • heap/CopiedBlockInlines.h:
    (JSC::CopiedBlock::reportLiveBytes):
  • heap/CopiedSpace.cpp:
  • heap/CopiedSpace.h:
  • heap/Heap.cpp:
    (JSC::Heap::Heap):
    (JSC::Heap::didAbandon):
    (JSC::Heap::markRoots):
    (JSC::Heap::copyBackingStores):
    (JSC::Heap::collectAllGarbage):
    (JSC::Heap::collect):
    (JSC::Heap::didAllocate):
  • heap/Heap.h:
    (JSC::Heap::shouldCollect):
    (JSC::Heap::isCollecting):
    (JSC::Heap::isWriteBarrierEnabled):
    (JSC::Heap::writeBarrier):
  • heap/HeapOperation.h:
  • heap/MarkStack.cpp:
    (JSC::MarkStackArray::~MarkStackArray):
  • heap/MarkStack.h:
  • heap/MarkedAllocator.cpp:
    (JSC::MarkedAllocator::isPagedOut):
    (JSC::MarkedAllocator::tryAllocateHelper):
    (JSC::MarkedAllocator::addBlock):
    (JSC::MarkedAllocator::removeBlock):
  • heap/MarkedAllocator.h:
    (JSC::MarkedAllocator::MarkedAllocator):
    (JSC::MarkedAllocator::reset):
  • heap/MarkedBlock.cpp:
  • heap/MarkedBlock.h:
    (JSC::MarkedBlock::lastChanceToFinalize):
    (JSC::MarkedBlock::didConsumeEmptyFreeList):
    (JSC::MarkedBlock::clearMarks):
  • heap/MarkedSpace.cpp:
    (JSC::MarkedSpace::~MarkedSpace):
    (JSC::MarkedSpace::resetAllocators):
    (JSC::MarkedSpace::visitWeakSets):
    (JSC::MarkedSpace::reapWeakSets):
  • heap/MarkedSpace.h:
    (JSC::ClearMarks::operator()):
    (JSC::MarkedSpace::clearMarks):
  • heap/SlotVisitor.cpp:
    (JSC::SlotVisitor::~SlotVisitor):
  • heap/SlotVisitor.h:
    (JSC::SlotVisitor::sharedData):
  • heap/SlotVisitorInlines.h:
    (JSC::SlotVisitor::internalAppend):
    (JSC::SlotVisitor::copyLater):
    (JSC::SlotVisitor::reportExtraMemoryUsage):
  • jit/Repatch.cpp:
  • runtime/JSGenericTypedArrayViewInlines.h:
    (JSC::JSGenericTypedArrayView<Adaptor>::visitChildren):
  • runtime/JSPropertyNameIterator.h:
    (JSC::StructureRareData::setEnumerationCache):
  • runtime/JSString.cpp:
    (JSC::JSString::visitChildren):
  • runtime/StructureRareDataInlines.h:
    (JSC::StructureRareData::setPreviousID):
    (JSC::StructureRareData::setObjectToStringValue):
  • runtime/WeakMapData.cpp:
    (JSC::WeakMapData::visitChildren):

Source/WTF:

  • wtf/Bitmap.h:
    (WTF::WordType>::count):
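
For context, the patch being rolled out (r161540) was an early cut of generational GC plumbing for JavaScriptCore: eden versus full collections, a per-block remembered set, and a write barrier that records old-to-new pointers. The sketch below is a minimal, illustrative model of that barrier pattern. It uses hypothetical types rather than JavaScriptCore's actual Heap/MarkedBlock API and only mirrors the checks visible in the reverted Heap::writeBarrier and Heap::addToRememberedSet hunks further down.

    // Minimal standalone sketch (not WebKit's classes) of the generational
    // write-barrier pattern that r161540 added and this changeset removes:
    // when an already-marked (old) cell is mutated to point at an unmarked
    // (new) cell, the old cell is recorded in a remembered set so the next
    // eden collection re-scans it. All names here are illustrative only.
    #include <unordered_set>
    #include <vector>

    struct Cell {
        bool marked = false;            // set by the last full collection
        std::vector<Cell*> references;  // outgoing edges
    };

    class Heap {
    public:
        // Barrier executed after storing 'to' into a field of 'from'.
        void writeBarrier(Cell* from, Cell* to)
        {
            if (!from || !from->marked)
                return;                 // 'from' is itself new; it will be scanned anyway
            if (!to || to->marked)
                return;                 // no old-to-new edge was created
            addToRememberedSet(from);
        }

        // During an eden (young-generation) collection, only remembered old
        // cells and newly allocated cells need to be re-scanned.
        const std::unordered_set<Cell*>& rememberedSet() const { return m_rememberedSet; }

    private:
        void addToRememberedSet(Cell* cell)
        {
            // WebKit's version used a per-block bitmap plus the mark stack.
            m_rememberedSet.insert(cell);
        }

        std::unordered_set<Cell*> m_rememberedSet;
    };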

Location: trunk/Source
Files: 31 edited

  • trunk/Source/JavaScriptCore/ChangeLog

    (r161554 → r161557)
+2014-01-09  Commit Queue  <commit-queue@webkit.org>
+
+        Unreviewed, rolling out r161540.
+        http://trac.webkit.org/changeset/161540
+        https://bugs.webkit.org/show_bug.cgi?id=126704
+
+        Caused assertion failures on multiple tests (Requested by ap
+        on #webkit).
+
+        * bytecode/CodeBlock.cpp:
+        (JSC::CodeBlock::visitAggregate):
+        * bytecode/CodeBlock.h:
+        (JSC::CodeBlockSet::mark):
+        * dfg/DFGOperations.cpp:
+        * heap/CodeBlockSet.cpp:
+        (JSC::CodeBlockSet::add):
+        (JSC::CodeBlockSet::traceMarked):
+        * heap/CodeBlockSet.h:
+        * heap/CopiedBlockInlines.h:
+        (JSC::CopiedBlock::reportLiveBytes):
+        * heap/CopiedSpace.cpp:
+        * heap/CopiedSpace.h:
+        * heap/Heap.cpp:
+        (JSC::Heap::Heap):
+        (JSC::Heap::didAbandon):
+        (JSC::Heap::markRoots):
+        (JSC::Heap::copyBackingStores):
+        (JSC::Heap::collectAllGarbage):
+        (JSC::Heap::collect):
+        (JSC::Heap::didAllocate):
+        * heap/Heap.h:
+        (JSC::Heap::shouldCollect):
+        (JSC::Heap::isCollecting):
+        (JSC::Heap::isWriteBarrierEnabled):
+        (JSC::Heap::writeBarrier):
+        * heap/HeapOperation.h:
+        * heap/MarkStack.cpp:
+        (JSC::MarkStackArray::~MarkStackArray):
+        * heap/MarkStack.h:
+        * heap/MarkedAllocator.cpp:
+        (JSC::MarkedAllocator::isPagedOut):
+        (JSC::MarkedAllocator::tryAllocateHelper):
+        (JSC::MarkedAllocator::addBlock):
+        (JSC::MarkedAllocator::removeBlock):
+        * heap/MarkedAllocator.h:
+        (JSC::MarkedAllocator::MarkedAllocator):
+        (JSC::MarkedAllocator::reset):
+        * heap/MarkedBlock.cpp:
+        * heap/MarkedBlock.h:
+        (JSC::MarkedBlock::lastChanceToFinalize):
+        (JSC::MarkedBlock::didConsumeEmptyFreeList):
+        (JSC::MarkedBlock::clearMarks):
+        * heap/MarkedSpace.cpp:
+        (JSC::MarkedSpace::~MarkedSpace):
+        (JSC::MarkedSpace::resetAllocators):
+        (JSC::MarkedSpace::visitWeakSets):
+        (JSC::MarkedSpace::reapWeakSets):
+        * heap/MarkedSpace.h:
+        (JSC::ClearMarks::operator()):
+        (JSC::MarkedSpace::clearMarks):
+        * heap/SlotVisitor.cpp:
+        (JSC::SlotVisitor::~SlotVisitor):
+        * heap/SlotVisitor.h:
+        (JSC::SlotVisitor::sharedData):
+        * heap/SlotVisitorInlines.h:
+        (JSC::SlotVisitor::internalAppend):
+        (JSC::SlotVisitor::copyLater):
+        (JSC::SlotVisitor::reportExtraMemoryUsage):
+        * jit/Repatch.cpp:
+        * runtime/JSGenericTypedArrayViewInlines.h:
+        (JSC::JSGenericTypedArrayView<Adaptor>::visitChildren):
+        * runtime/JSPropertyNameIterator.h:
+        (JSC::StructureRareData::setEnumerationCache):
+        * runtime/JSString.cpp:
+        (JSC::JSString::visitChildren):
+        * runtime/StructureRareDataInlines.h:
+        (JSC::StructureRareData::setPreviousID):
+        (JSC::StructureRareData::setObjectToStringValue):
+        * runtime/WeakMapData.cpp:
+        (JSC::WeakMapData::visitChildren):
+
 2014-01-09  Andreas Kling  <akling@apple.com>
 
  • trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp

    (r161540 → r161557)
         otherBlock->visitAggregate(visitor);
 
-    visitor.reportExtraMemoryUsage(ownerExecutable(), sizeof(CodeBlock));
+    visitor.reportExtraMemoryUsage(sizeof(CodeBlock));
     if (m_jitCode)
-        visitor.reportExtraMemoryUsage(ownerExecutable(), m_jitCode->size());
+        visitor.reportExtraMemoryUsage(m_jitCode->size());
     if (m_instructions.size()) {
         // Divide by refCount() because m_instructions points to something that is shared
…
         // Having each CodeBlock report only its proportional share of the size is one way
         // of accomplishing this.
-        visitor.reportExtraMemoryUsage(ownerExecutable(), m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
+        visitor.reportExtraMemoryUsage(m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
     }
 
  • trunk/Source/JavaScriptCore/bytecode/CodeBlock.h

    (r161540 → r161557)
 
     (*iter)->m_mayBeExecuting = true;
-    m_currentlyExecuting.append(static_cast<CodeBlock*>(candidateCodeBlock));
 }
 
  • trunk/Source/JavaScriptCore/dfg/DFGOperations.cpp

    (r161540 → r161557)
 
     ASSERT(!object->structure()->outOfLineCapacity());
-    DeferGC deferGC(vm.heap);
     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
     object->setButterflyWithoutChangingStructure(vm, result);
…
     NativeCallFrameTracer tracer(&vm, exec);
 
-    DeferGC deferGC(vm.heap);
     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
     object->setButterflyWithoutChangingStructure(vm, result);
  • trunk/Source/JavaScriptCore/heap/CodeBlockSet.cpp

    (r161540 → r161557)
 void CodeBlockSet::add(PassRefPtr<CodeBlock> codeBlock)
 {
-    CodeBlock* block = codeBlock.leakRef();
-    bool isNewEntry = m_set.add(block).isNewEntry;
+    bool isNewEntry = m_set.add(codeBlock.leakRef()).isNewEntry;
     ASSERT_UNUSED(isNewEntry, isNewEntry);
 }
…
         if (!codeBlock->m_mayBeExecuting)
             continue;
-        codeBlock->ownerExecutable()->visitChildren(codeBlock->ownerExecutable(), visitor);
+        codeBlock->visitAggregate(visitor);
     }
-}
-
-void CodeBlockSet::rememberCurrentlyExecutingCodeBlocks(Heap* heap)
-{
-    for (size_t i = 0; i < m_currentlyExecuting.size(); ++i)
-        heap->addToRememberedSet(m_currentlyExecuting[i]->ownerExecutable());
-    m_currentlyExecuting.clear();
 }
 
  • trunk/Source/JavaScriptCore/heap/CodeBlockSet.h

    (r161540 → r161557)
 #include <wtf/PassRefPtr.h>
 #include <wtf/RefPtr.h>
-#include <wtf/Vector.h>
 
 namespace JSC {
 
 class CodeBlock;
-class Heap;
 class SlotVisitor;
 
…
     void traceMarked(SlotVisitor&);
 
-    // Add all currently executing CodeBlocks to the remembered set to be
-    // re-scanned during the next collection.
-    void rememberCurrentlyExecutingCodeBlocks(Heap*);
-
 private:
     // This is not a set of RefPtr<CodeBlock> because we need to be able to find
…
     // and all, but that seemed like overkill.
     HashSet<CodeBlock* > m_set;
-    Vector<CodeBlock*> m_currentlyExecuting;
 };
 
  • trunk/Source/JavaScriptCore/heap/CopiedBlockInlines.h

    (r161540 → r161557)
     m_liveBytes += bytes;
 
-    if (isPinned())
-        return;
-
     if (!shouldEvacuate()) {
         pin();
  • trunk/Source/JavaScriptCore/heap/CopiedSpace.cpp

    (r161540 → r161557)
 }
 
-void CopiedSpace::didStartFullCollection()
-{
-    ASSERT(heap()->operationInProgress() == FullCollection);
-
-    ASSERT(m_fromSpace->isEmpty());
-
-    for (CopiedBlock* block = m_toSpace->head(); block; block = block->next())
-        block->didSurviveGC();
-
-    for (CopiedBlock* block = m_oversizeBlocks.head(); block; block = block->next())
-        block->didSurviveGC();
-}
-
 } // namespace JSC
  • trunk/Source/JavaScriptCore/heap/CopiedSpace.h

    (r161540 → r161557)
     CopiedAllocator& allocator() { return m_allocator; }
 
-    void didStartFullCollection();
-
     void startedCopying();
     void doneCopying();
…
 
     static CopiedBlock* blockFor(void*);
-
-    Heap* heap() const { return m_heap; }
 
 private:
  • trunk/Source/JavaScriptCore/heap/Heap.cpp

    (r161544 → r161557)
     , m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize))
     , m_sizeAfterLastCollect(0)
-    , m_bytesAllocatedThisCycle(0)
-    , m_bytesAbandonedThisCycle(0)
-    , m_maxEdenSize(m_minBytesPerCycle)
-    , m_maxHeapSize(m_minBytesPerCycle)
-    , m_shouldDoFullCollection(false)
+    , m_bytesAllocatedLimit(m_minBytesPerCycle)
+    , m_bytesAllocated(0)
+    , m_bytesAbandoned(0)
     , m_totalBytesVisited(0)
     , m_totalBytesCopied(0)
…
     , m_handleSet(vm)
     , m_isSafeToCollect(false)
-    , m_writeBarrierBuffer(256)
+    , m_writeBarrierBuffer(128)
     , m_vm(vm)
     , m_lastGCLength(0)
…
 {
     if (m_activityCallback)
-        m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle);
-    m_bytesAbandonedThisCycle += bytes;
+        m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
+    m_bytesAbandoned += bytes;
 }
 
…
     visitor.setup();
     HeapRootVisitor heapRootVisitor(visitor);
-
-    Vector<const JSCell*> rememberedSet(m_slotVisitor.markStack().size());
-    m_slotVisitor.markStack().fillVector(rememberedSet);
 
     {
…
     }
 
-    {
-        GCPHASE(ClearRememberedSet);
-        for (unsigned i = 0; i < rememberedSet.size(); ++i) {
-            const JSCell* cell = rememberedSet[i];
-            MarkedBlock::blockFor(cell)->clearRemembered(cell);
-        }
-    }
-
     GCCOUNTER(VisitedValueCount, visitor.visitCount());
 
…
 #endif
 
-    if (m_operationInProgress == EdenCollection) {
-        m_totalBytesVisited += visitor.bytesVisited();
-        m_totalBytesCopied += visitor.bytesCopied();
-    } else {
-        ASSERT(m_operationInProgress == FullCollection);
-        m_totalBytesVisited = visitor.bytesVisited();
-        m_totalBytesCopied = visitor.bytesCopied();
-    }
+    m_totalBytesVisited = visitor.bytesVisited();
+    m_totalBytesCopied = visitor.bytesCopied();
 #if ENABLE(PARALLEL_GC)
     m_totalBytesVisited += m_sharedData.childBytesVisited();
…
 }
 
-template <HeapOperation collectionType>
 void Heap::copyBackingStores()
 {
-    if (collectionType == EdenCollection)
-        return;
-
     m_storageSpace.startedCopying();
     if (m_storageSpace.shouldDoCopyPhase()) {
…
         m_storageSpace.doneCopying();
         m_sharedData.didFinishCopying();
-    } else
+    } else 
         m_storageSpace.doneCopying();
 }
…
 }
 
-void Heap::addToRememberedSet(const JSCell* cell)
-{
-    ASSERT(cell);
-    ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
-    ASSERT(isMarked(cell));
-    if (isInRememberedSet(cell))
-        return;
-    MarkedBlock::blockFor(cell)->setRemembered(cell);
-    m_slotVisitor.unconditionallyAppend(const_cast<JSCell*>(cell));
-}
-
 void Heap::collectAllGarbage()
 {
…
         return;
 
-    m_shouldDoFullCollection = true;
     collect();
 
…
         m_vm->prepareToDiscardCode();
     }
-
-    bool isFullCollection = m_shouldDoFullCollection;
-    if (isFullCollection) {
-        m_operationInProgress = FullCollection;
-        m_slotVisitor.clearMarkStack();
-        m_shouldDoFullCollection = false;
-        if (Options::logGC())
-            dataLog("FullCollection, ");
-    } else {
-#if ENABLE(GGC)
-        m_operationInProgress = EdenCollection;
-        if (Options::logGC())
-            dataLog("EdenCollection, ");
-#else
-        m_operationInProgress = FullCollection;
-        m_slotVisitor.clearMarkStack();
-        if (Options::logGC())
-            dataLog("FullCollection, ");
-#endif
-    }
-    if (m_operationInProgress == FullCollection)
-        m_extraMemoryUsage = 0;
+
+    m_operationInProgress = Collection;
+    m_extraMemoryUsage = 0;
 
     if (m_activityCallback)
…
         GCPHASE(StopAllocation);
         m_objectSpace.stopAllocating();
-        if (m_operationInProgress == FullCollection)
-            m_storageSpace.didStartFullCollection();
-    }
-
-    {
-        GCPHASE(FlushWriteBarrierBuffer);
-        if (m_operationInProgress == EdenCollection)
-            m_writeBarrierBuffer.flush(*this);
-        else
-            m_writeBarrierBuffer.reset();
     }
 
…
     }
 
-    if (m_operationInProgress == FullCollection) {
+    {
         m_blockSnapshot.resize(m_objectSpace.blocks().set().size());
         MarkedBlockSnapshotFunctor functor(m_blockSnapshot);
…
     }
 
-    if (m_operationInProgress == FullCollection)
-        copyBackingStores<FullCollection>();
-    else
-        copyBackingStores<EdenCollection>();
+    copyBackingStores();
 
     {
…
     }
 
-    if (m_operationInProgress == FullCollection)
-        m_sweeper->startSweeping(m_blockSnapshot);
-
-    {
-        GCPHASE(AddCurrentlyExecutingCodeBlocksToRememberedSet);
-        m_codeBlocks.rememberCurrentlyExecutingCodeBlocks(this);
-    }
-
-    m_bytesAbandonedThisCycle = 0;
+    m_sweeper->startSweeping(m_blockSnapshot);
+    m_bytesAbandoned = 0;
 
     {
…
         HeapStatistics::exitWithFailure();
 
-    if (m_operationInProgress == FullCollection) {
-        // To avoid pathological GC churn in very small and very large heaps, we set
-        // the new allocation limit based on the current size of the heap, with a
-        // fixed minimum.
-        m_maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
-        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
-    } else {
-        ASSERT(currentHeapSize >= m_sizeAfterLastCollect);
-        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
-        double edenToOldGenerationRatio = (double)m_maxEdenSize / (double)m_maxHeapSize;
-        double minEdenToOldGenerationRatio = 1.0 / 3.0;
-        if (edenToOldGenerationRatio < minEdenToOldGenerationRatio)
-            m_shouldDoFullCollection = true;
-        m_maxHeapSize += currentHeapSize - m_sizeAfterLastCollect;
-        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
-    }
-
     m_sizeAfterLastCollect = currentHeapSize;
 
-    m_bytesAllocatedThisCycle = 0;
+    // To avoid pathological GC churn in very small and very large heaps, we set
+    // the new allocation limit based on the current size of the heap, with a
+    // fixed minimum.
+    size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
+    m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
+
+    m_bytesAllocated = 0;
     double lastGCEndTime = WTF::monotonicallyIncreasingTime();
     m_lastGCLength = lastGCEndTime - lastGCStartTime;
…
     if (Options::recordGCPauseTimes())
         HeapStatistics::recordGCPauseTime(lastGCStartTime, lastGCEndTime);
-    RELEASE_ASSERT(m_operationInProgress == EdenCollection || m_operationInProgress == FullCollection);
+    RELEASE_ASSERT(m_operationInProgress == Collection);
 
     m_operationInProgress = NoOperation;
…
         dataLog(after - before, " ms, ", currentHeapSize / 1024, " kb]\n");
     }
+
+#if ENABLE(ALLOCATION_LOGGING)
+    dataLogF("JSC GC finishing collection.\n");
+#endif
 }
 
…
 {
     if (m_activityCallback)
-        m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle);
-    m_bytesAllocatedThisCycle += bytes;
+        m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
+    m_bytesAllocated += bytes;
 }
 
…
     decrementDeferralDepth();
     collectIfNecessaryOrDefer();
-}
-
-void Heap::writeBarrier(const JSCell* from)
-{
-    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
-    if (!from || !isMarked(from))
-        return;
-    Heap* heap = Heap::heap(from);
-    heap->addToRememberedSet(from);
 }
 
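For reference, the eden/full sizing policy deleted from Heap::collect above worked roughly as follows. This is a paraphrased, standalone sketch, not the real Heap class; minSize and proportionalSize stand in for WebKit's minHeapSize() and proportionalHeapSize() helpers.

    // Illustrative sketch of the reverted sizing policy at the end of Heap::collect.
    #include <algorithm>
    #include <cstddef>

    struct SizingPolicy {
        size_t maxHeapSize = 0;
        size_t maxEdenSize = 0;
        size_t sizeAfterLastCollect = 0;
        bool shouldDoFullCollection = false;

        void didFinishCollection(bool wasFullCollection, size_t currentHeapSize,
                                 size_t minSize, size_t proportionalSize)
        {
            if (wasFullCollection) {
                // Full collection: re-derive the budget from the live size, with a floor.
                maxHeapSize = std::max(minSize, proportionalSize);
                maxEdenSize = maxHeapSize - currentHeapSize;
            } else {
                // Eden collection: if eden's share of the budget has shrunk below 1/3,
                // request a full collection next time; then grow the budget by the
                // bytes that survived this cycle.
                maxEdenSize = maxHeapSize - currentHeapSize;
                if (static_cast<double>(maxEdenSize) / static_cast<double>(maxHeapSize) < 1.0 / 3.0)
                    shouldDoFullCollection = true;
                maxHeapSize += currentHeapSize - sizeAfterLastCollect;
                maxEdenSize = maxHeapSize - currentHeapSize;
            }
            sizeAfterLastCollect = currentHeapSize;
        }
    };
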
  • trunk/Source/JavaScriptCore/heap/Heap.h

    (r161540 → r161557)
         static void setMarked(const void*);
 
-        JS_EXPORT_PRIVATE void addToRememberedSet(const JSCell*);
-        bool isInRememberedSet(const JSCell* cell) const
-        {
-            ASSERT(cell);
-            ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
-            return MarkedBlock::blockFor(cell)->isRemembered(cell);
-        }
         static bool isWriteBarrierEnabled();
-        JS_EXPORT_PRIVATE static void writeBarrier(const JSCell*);
+        static void writeBarrier(const JSCell*);
         static void writeBarrier(const JSCell*, JSValue);
         static void writeBarrier(const JSCell*, JSCell*);
+        static uint8_t* addressOfCardFor(JSCell*);
 
         WriteBarrierBuffer& writeBarrierBuffer() { return m_writeBarrierBuffer; }
…
         // true if collection is in progress
         inline bool isCollecting();
-        inline HeapOperation operationInProgress() { return m_operationInProgress; }
         // true if an allocation or collection is in progress
         inline bool isBusy();
…
         void markProtectedObjects(HeapRootVisitor&);
         void markTempSortVectors(HeapRootVisitor&);
-        template <HeapOperation collectionType>
         void copyBackingStores();
         void harvestWeakReferences();
…
         size_t m_sizeAfterLastCollect;
 
-        size_t m_bytesAllocatedThisCycle;
-        size_t m_bytesAbandonedThisCycle;
-        size_t m_maxEdenSize;
-        size_t m_maxHeapSize;
-        bool m_shouldDoFullCollection;
+        size_t m_bytesAllocatedLimit;
+        size_t m_bytesAllocated;
+        size_t m_bytesAbandoned;
+
         size_t m_totalBytesVisited;
         size_t m_totalBytesCopied;
…
         size_t m_extraMemoryUsage;
 
-        HashSet<const JSCell*> m_copyingRememberedSet;
-
         ProtectCountSet m_protectedValues;
         Vector<Vector<ValueStringPair, 0, UnsafeVectorOverflow>* > m_tempSortingVectors;
…
             return false;
         if (Options::gcMaxHeapSize())
-            return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
-        return m_bytesAllocatedThisCycle > m_maxEdenSize && m_isSafeToCollect && m_operationInProgress == NoOperation;
+            return m_bytesAllocated > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
+        return m_bytesAllocated > m_bytesAllocatedLimit && m_isSafeToCollect && m_operationInProgress == NoOperation;
     }
 
…
     bool Heap::isCollecting()
     {
-        return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
+        return m_operationInProgress == Collection;
     }
 
…
     inline bool Heap::isWriteBarrierEnabled()
     {
-#if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)
+#if ENABLE(WRITE_BARRIER_PROFILING)
         return true;
 #else
…
     }
 
-    inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
-    {
-#if ENABLE(WRITE_BARRIER_PROFILING)
+    inline void Heap::writeBarrier(const JSCell*)
+    {
         WriteBarrierCounters::countWriteBarrier();
-#endif
-        if (!from || !isMarked(from))
-            return;
-        if (!to || isMarked(to))
-            return;
-        Heap::heap(from)->addToRememberedSet(from);
-    }
-
-    inline void Heap::writeBarrier(const JSCell* from, JSValue to)
-    {
-#if ENABLE(WRITE_BARRIER_PROFILING)
+    }
+
+    inline void Heap::writeBarrier(const JSCell*, JSCell*)
+    {
         WriteBarrierCounters::countWriteBarrier();
-#endif
-        if (!to.isCell())
-            return;
-        writeBarrier(from, to.asCell());
+    }
+
+    inline void Heap::writeBarrier(const JSCell*, JSValue)
+    {
+        WriteBarrierCounters::countWriteBarrier();
     }
 
  • trunk/Source/JavaScriptCore/heap/HeapOperation.h

    (r161540 → r161557)
 namespace JSC {
 
-enum HeapOperation { NoOperation, Allocation, FullCollection, EdenCollection };
+enum HeapOperation { NoOperation, Allocation, Collection };
 
 } // namespace JSC
  • trunk/Source/JavaScriptCore/heap/MarkStack.cpp

    (r161540 → r161557)
 MarkStackArray::~MarkStackArray()
 {
-    ASSERT(m_numberOfSegments == 1);
-    ASSERT(m_segments.size() == 1);
+    ASSERT(m_numberOfSegments == 1 && m_segments.size() == 1);
     m_blockAllocator.deallocate(MarkStackSegment::destroy(m_segments.removeHead()));
-    m_numberOfSegments--;
-    ASSERT(!m_numberOfSegments);
-    ASSERT(!m_segments.size());
-}
-
-void MarkStackArray::clear()
-{
-    if (!m_segments.head())
-        return;
-    MarkStackSegment* next;
-    for (MarkStackSegment* current = m_segments.head(); current->next(); current = next) {
-        next = current->next();
-        m_segments.remove(current);
-        m_blockAllocator.deallocate(MarkStackSegment::destroy(current));
-    }
-    m_top = 0;
-    m_numberOfSegments = 1;
-#if !ASSERT_DISABLED
-    m_segments.head()->m_top = 0;
-#endif
 }
 
…
 }
 
-void MarkStackArray::fillVector(Vector<const JSCell*>& vector)
-{
-    ASSERT(vector.size() == size());
-
-    MarkStackSegment* currentSegment = m_segments.head();
-    if (!currentSegment)
-        return;
-
-    unsigned count = 0;
-    for (unsigned i = 0; i < m_top; ++i) {
-        ASSERT(currentSegment->data()[i]);
-        vector[count++] = currentSegment->data()[i];
-    }
-
-    currentSegment = currentSegment->next();
-    while (currentSegment) {
-        for (unsigned i = 0; i < s_segmentCapacity; ++i) {
-            ASSERT(currentSegment->data()[i]);
-            vector[count++] = currentSegment->data()[i];
-        }
-        currentSegment = currentSegment->next();
-    }
-}
-
 } // namespace JSC
  • trunk/Source/JavaScriptCore/heap/MarkStack.h

    (r161540 → r161557)
 #include "HeapBlock.h"
 #include <wtf/StdLibExtras.h>
-#include <wtf/Vector.h>
 
 namespace JSC {
…
     bool isEmpty();
 
-    void fillVector(Vector<const JSCell*>&);
-    void clear();
-
 private:
     template <size_t size> struct CapacityFromSize {
  • trunk/Source/JavaScriptCore/heap/MarkedAllocator.cpp

    (r161540 → r161557)
 namespace JSC {
 
-static bool isListPagedOut(double deadline, DoublyLinkedList<MarkedBlock>& list)
+bool MarkedAllocator::isPagedOut(double deadline)
 {
     unsigned itersSinceLastTimeCheck = 0;
-    MarkedBlock* block = list.head();
+    MarkedBlock* block = m_blockList.head();
     while (block) {
         block = block->next();
…
         }
     }
-    return false;
-}
 
-bool MarkedAllocator::isPagedOut(double deadline)
-{
-    if (isListPagedOut(deadline, m_blockList))
-        return true;
     return false;
 }
…
         DelayedReleaseScope delayedReleaseScope(*m_markedSpace);
         if (m_currentBlock) {
-            ASSERT(m_currentBlock == m_nextBlockToSweep);
+            ASSERT(m_currentBlock == m_blocksToSweep);
             m_currentBlock->didConsumeFreeList();
-            m_nextBlockToSweep = m_currentBlock->next();
+            m_blocksToSweep = m_currentBlock->next();
         }
 
-        MarkedBlock* next;
-        for (MarkedBlock*& block = m_nextBlockToSweep; block; block = next) {
-            next = block->next();
-
+        for (MarkedBlock*& block = m_blocksToSweep; block; block = block->next()) {
             MarkedBlock::FreeList freeList = block->sweep(MarkedBlock::SweepToFreeList);
-
             if (!freeList.head) {
                 block->didConsumeEmptyFreeList();
-                m_blockList.remove(block);
-                m_blockList.push(block);
-                if (!m_lastFullBlock)
-                    m_lastFullBlock = block;
                 continue;
             }
…
     m_freeList.head = head->next;
     ASSERT(head);
-    m_markedSpace->didAllocateInBlock(m_currentBlock);
     return head;
 }
…
 
     m_blockList.append(block);
-    m_nextBlockToSweep = m_currentBlock = block;
+    m_blocksToSweep = m_currentBlock = block;
     m_freeList = block->sweep(MarkedBlock::SweepToFreeList);
     m_markedSpace->didAddBlock(block);
…
         m_freeList = MarkedBlock::FreeList();
     }
-    if (m_nextBlockToSweep == block)
-        m_nextBlockToSweep = m_nextBlockToSweep->next();
-
-    if (block == m_lastFullBlock)
-        m_lastFullBlock = m_lastFullBlock->prev();
-
+    if (m_blocksToSweep == block)
+        m_blocksToSweep = m_blocksToSweep->next();
     m_blockList.remove(block);
 }
 
-void MarkedAllocator::reset()
-{
-    m_lastActiveBlock = 0;
-    m_currentBlock = 0;
-    m_freeList = MarkedBlock::FreeList();
-    if (m_heap->operationInProgress() == FullCollection)
-        m_lastFullBlock = 0;
-
-    if (m_lastFullBlock)
-        m_nextBlockToSweep = m_lastFullBlock->next() ? m_lastFullBlock->next() : m_lastFullBlock;
-    else
-        m_nextBlockToSweep = m_blockList.head();
-}
-
 } // namespace JSC
  • trunk/Source/JavaScriptCore/heap/MarkedAllocator.h

    (r161540 → r161557)
     MarkedBlock* m_currentBlock;
     MarkedBlock* m_lastActiveBlock;
-    MarkedBlock* m_nextBlockToSweep;
-    MarkedBlock* m_lastFullBlock;
+    MarkedBlock* m_blocksToSweep;
     DoublyLinkedList<MarkedBlock> m_blockList;
     size_t m_cellSize;
…
     : m_currentBlock(0)
     , m_lastActiveBlock(0)
-    , m_nextBlockToSweep(0)
-    , m_lastFullBlock(0)
+    , m_blocksToSweep(0)
     , m_cellSize(0)
     , m_destructorType(MarkedBlock::None)
…
 #endif
     return head;
+}
+
+inline void MarkedAllocator::reset()
+{
+    m_lastActiveBlock = 0;
+    m_currentBlock = 0;
+    m_freeList = MarkedBlock::FreeList();
+    m_blocksToSweep = m_blockList.head();
 }
 
  • trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp

    (r161540 → r161557)
 }
 
-void MarkedBlock::clearMarks()
-{
-    if (heap()->operationInProgress() == JSC::EdenCollection)
-        this->clearMarksWithCollectionType<EdenCollection>();
-    else
-        this->clearMarksWithCollectionType<FullCollection>();
-}
-
-void MarkedBlock::clearRememberedSet()
-{
-    m_rememberedSet.clearAll();
-}
-
-template <HeapOperation collectionType>
-void MarkedBlock::clearMarksWithCollectionType()
-{
-    ASSERT(collectionType == FullCollection || collectionType == EdenCollection);
-    HEAP_LOG_BLOCK_STATE_TRANSITION(this);
-
-    ASSERT(m_state != New && m_state != FreeListed);
-    if (collectionType == FullCollection) {
-        m_marks.clearAll();
-        m_rememberedSet.clearAll();
-    }
-
-    // This will become true at the end of the mark phase. We set it now to
-    // avoid an extra pass to do so later.
-    m_state = Marked;
-}
-
-void MarkedBlock::lastChanceToFinalize()
-{
-    m_weakSet.lastChanceToFinalize();
-
-    clearNewlyAllocated();
-    clearMarksWithCollectionType<FullCollection>();
-    sweep();
-}
-
 MarkedBlock::FreeList MarkedBlock::resumeAllocating()
 {
  • trunk/Source/JavaScriptCore/heap/MarkedBlock.h

    (r161540 → r161557)
 #include "HeapBlock.h"
 
-#include "HeapOperation.h"
 #include "WeakSet.h"
 #include <wtf/Bitmap.h>
…
 
     public:
-        static const size_t atomSize = 16; // bytes
+        static const size_t atomSize = 8; // bytes
         static const size_t atomShiftAmount = 4; // log_2(atomSize) FIXME: Change atomSize to 16.
         static const size_t blockSize = 64 * KB;
…
         FreeList resumeAllocating(); // Call this if you canonicalized a block for some non-collection related purpose.
         void didConsumeEmptyFreeList(); // Call this if you sweep a block, but the returned FreeList is empty.
-        void didSweepToNoAvail(); // Call this if you sweep a block and get an empty free list back.
 
         // Returns true if the "newly allocated" bitmap was non-null
…
         bool clearNewlyAllocated();
         void clearMarks();
-        void clearRememberedSet();
-        template <HeapOperation collectionType>
-        void clearMarksWithCollectionType();
-
         size_t markCount();
         bool isEmpty();
…
         void setMarked(const void*);
         void clearMarked(const void*);
-
-        void setRemembered(const void*);
-        void clearRemembered(const void*);
-        void atomicClearRemembered(const void*);
-        bool isRemembered(const void*);
 
         bool isNewlyAllocated(const void*);
…
         size_t m_endAtom; // This is a fuzzy end. Always test for < m_endAtom.
 #if ENABLE(PARALLEL_GC)
-        WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic, uint8_t> m_marks;
-        WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic, uint8_t> m_rememberedSet;
+        WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic> m_marks;
 #else
-        WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic, uint8_t> m_marks;
-        WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic, uint8_t> m_rememberedSet;
+        WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic> m_marks;
 #endif
         OwnPtr<WTF::Bitmap<atomsPerBlock>> m_newlyAllocated;
…
     }
 
+    inline void MarkedBlock::lastChanceToFinalize()
+    {
+        m_weakSet.lastChanceToFinalize();
+
+        clearNewlyAllocated();
+        clearMarks();
+        sweep();
+    }
+
     inline MarkedAllocator* MarkedBlock::allocator() const
     {
…
 
         ASSERT(!m_newlyAllocated);
+#ifndef NDEBUG
+        for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell)
+            ASSERT(m_marks.get(i));
+#endif
         ASSERT(m_state == FreeListed);
         m_state = Marked;
     }
 
+    inline void MarkedBlock::clearMarks()
+    {
+        HEAP_LOG_BLOCK_STATE_TRANSITION(this);
+
+        ASSERT(m_state != New && m_state != FreeListed);
+        m_marks.clearAll();
+
+        // This will become true at the end of the mark phase. We set it now to
+        // avoid an extra pass to do so later.
+        m_state = Marked;
+    }
+
     inline size_t MarkedBlock::markCount()
     {
…
     {
         return (reinterpret_cast<Bits>(p) - reinterpret_cast<Bits>(this)) / atomSize;
-    }
-
-    inline void MarkedBlock::setRemembered(const void* p)
-    {
-        m_rememberedSet.set(atomNumber(p));
-    }
-
-    inline void MarkedBlock::clearRemembered(const void* p)
-    {
-        m_rememberedSet.clear(atomNumber(p));
-    }
-
-    inline void MarkedBlock::atomicClearRemembered(const void* p)
-    {
-        m_rememberedSet.concurrentTestAndClear(atomNumber(p));
-    }
-
-    inline bool MarkedBlock::isRemembered(const void* p)
-    {
-        return m_rememberedSet.get(atomNumber(p));
     }
 
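The per-block remembered set removed above was a second bitmap alongside the mark bits, indexed by atom number, that is, the cell's byte offset within its 64 KB block divided by atomSize, as the atomNumber() helper in the hunk shows. A minimal illustration follows; the constants and types are illustrative stand-ins, not MarkedBlock itself.

    // Illustration of how a cell pointer maps to a bit in per-block bitmaps.
    #include <bitset>
    #include <cstddef>
    #include <cstdint>

    constexpr std::size_t blockSize = 64 * 1024;  // 64 KB block
    constexpr std::size_t atomSize  = 16;         // bytes per atom (r161540 moved this from 8 to 16)
    constexpr std::size_t atomsPerBlock = blockSize / atomSize;

    struct BlockBitmaps {
        std::bitset<atomsPerBlock> marks;         // set during marking
        std::bitset<atomsPerBlock> remembered;    // old cells holding pointers to new cells

        static std::size_t atomNumber(const void* block, const void* cell)
        {
            return (reinterpret_cast<std::uintptr_t>(cell) - reinterpret_cast<std::uintptr_t>(block)) / atomSize;
        }

        void setRemembered(const void* block, const void* cell) { remembered.set(atomNumber(block, cell)); }
        bool isRemembered(const void* block, const void* cell) const { return remembered.test(atomNumber(block, cell)); }
    };
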
  • trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp

    (r161540 → r161557)
     Free free(Free::FreeAll, this);
     forEachBlock(free);
-    ASSERT(!m_blocks.set().size());
 }
 
…
     m_normalDestructorSpace.largeAllocator.reset();
     m_immortalStructureDestructorSpace.largeAllocator.reset();
-
-    m_blocksWithNewObjects.clear();
 }
 
…
 {
     VisitWeakSet visitWeakSet(heapRootVisitor);
-    if (m_heap->operationInProgress() == EdenCollection) {
-        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
-            visitWeakSet(m_blocksWithNewObjects[i]);
-    } else
-        forEachBlock(visitWeakSet);
+    forEachBlock(visitWeakSet);
 }
 
 void MarkedSpace::reapWeakSets()
 {
-    if (m_heap->operationInProgress() == EdenCollection) {
-        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
-            m_blocksWithNewObjects[i]->reapWeakSet();
-    } else
-        forEachBlock<ReapWeakSet>();
+    forEachBlock<ReapWeakSet>();
 }
 
…
 }
 
-#ifndef NDEBUG
-struct VerifyMarked : MarkedBlock::VoidFunctor {
-    void operator()(MarkedBlock* block) { ASSERT(block->needsSweeping()); }
-};
-#endif
-
-void MarkedSpace::clearMarks()
-{
-    if (m_heap->operationInProgress() == EdenCollection) {
-        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
-            m_blocksWithNewObjects[i]->clearMarks();
-    } else
-        forEachBlock<ClearMarks>();
-#ifndef NDEBUG
-    forEachBlock<VerifyMarked>();
-#endif
-}
-
 void MarkedSpace::willStartIterating()
 {
  • trunk/Source/JavaScriptCore/heap/MarkedSpace.h

    (r161540 → r161557)
 
 struct ClearMarks : MarkedBlock::VoidFunctor {
-    void operator()(MarkedBlock* block)
-    {
-        block->clearMarks();
-    }
-};
-
-struct ClearRememberedSet : MarkedBlock::VoidFunctor {
-    void operator()(MarkedBlock* block)
-    {
-        block->clearRememberedSet();
-    }
+    void operator()(MarkedBlock* block) { block->clearMarks(); }
 };
 
…
     void didAddBlock(MarkedBlock*);
     void didConsumeFreeList(MarkedBlock*);
-    void didAllocateInBlock(MarkedBlock*);
 
     void clearMarks();
-    void clearRememberedSet();
     void clearNewlyAllocated();
     void sweep();
…
     bool m_isIterating;
     MarkedBlockSet m_blocks;
-    Vector<MarkedBlock*> m_blocksWithNewObjects;
 
     DelayedReleaseScope* m_currentDelayedReleaseScope;
…
 }
 
-inline void MarkedSpace::didAllocateInBlock(MarkedBlock* block)
-{
-    m_blocksWithNewObjects.append(block);
-}
-
-inline void MarkedSpace::clearRememberedSet()
-{
-    forEachBlock<ClearRememberedSet>();
+inline void MarkedSpace::clearMarks()
+{
+    forEachBlock<ClearMarks>();
 }
 
  • trunk/Source/JavaScriptCore/heap/SlotVisitor.cpp

    (r161540 → r161557)
 SlotVisitor::~SlotVisitor()
 {
-    clearMarkStack();
+    ASSERT(m_stack.isEmpty());
 }
 
…
         m_shouldHashCons = false;
     }
-}
-
-void SlotVisitor::clearMarkStack()
-{
-    m_stack.clear();
 }
 
  • trunk/Source/JavaScriptCore/heap/SlotVisitor.h

    (r161540 → r161557)
     ~SlotVisitor();
 
-    MarkStackArray& markStack() { return m_stack; }
-
-    Heap* heap() const;
-
     void append(ConservativeRoots&);
 
…
     template<typename T>
     void appendUnbarrieredWeak(Weak<T>*);
-    void unconditionallyAppend(JSCell*);
 
     void addOpaqueRoot(void*);
…
     int opaqueRootCount();
 
-    GCThreadSharedData& sharedData() const { return m_shared; }
+    GCThreadSharedData& sharedData() { return m_shared; }
     bool isEmpty() { return m_stack.isEmpty(); }
 
     void setup();
     void reset();
-    void clearMarkStack();
 
     size_t bytesVisited() const { return m_bytesVisited; }
…
     void copyLater(JSCell*, CopyToken, void*, size_t);
 
-    void reportExtraMemoryUsage(JSCell* owner, size_t);
+    void reportExtraMemoryUsage(size_t size);
 
     void addWeakReferenceHarvester(WeakReferenceHarvester*);
  • trunk/Source/JavaScriptCore/heap/SlotVisitorInlines.h

    (r161540 → r161557)
     MARK_LOG_CHILD(*this, cell);
 
-    unconditionallyAppend(cell);
-}
-
-ALWAYS_INLINE void SlotVisitor::unconditionallyAppend(JSCell* cell)
-{
-    ASSERT(Heap::isMarked(cell));
-    m_visitCount++;
-
     // Should never attempt to mark something that is zapped.
     ASSERT(!cell->isZapped());
…
 {
     ASSERT(bytes);
-    // We don't do any copying during EdenCollections.
-    ASSERT(heap()->operationInProgress() != EdenCollection);
-
     m_bytesCopied += bytes;
 
…
     }
 
+    if (block->isPinned())
+        return;
+
     block->reportLiveBytes(owner, token, bytes);
 }
 
-inline void SlotVisitor::reportExtraMemoryUsage(JSCell* owner, size_t size)
-{
-    // We don't want to double-count the extra memory that was reported in previous collections.
-    if (heap()->operationInProgress() == EdenCollection && MarkedBlock::blockFor(owner)->isRemembered(owner))
-        return;
-
+inline void SlotVisitor::reportExtraMemoryUsage(size_t size)
+{
     size_t* counter = &m_shared.m_vm->heap.m_extraMemoryUsage;
 
…
 }
 
-inline Heap* SlotVisitor::heap() const
-{
-    return &sharedData().m_vm->heap;
-}
-
 } // namespace JSC
 
  • trunk/Source/JavaScriptCore/jit/Repatch.cpp

    (r161540 → r161557)
 #include "RepatchBuffer.h"
 #include "ScratchRegisterAllocator.h"
-#include "StackAlignment.h"
 #include "StructureRareDataInlines.h"
 #include "StructureStubClearingWatchpoint.h"
  • trunk/Source/JavaScriptCore/runtime/JSGenericTypedArrayViewInlines.h

    (r161540 → r161557)
 
     case OversizeTypedArray: {
-        visitor.reportExtraMemoryUsage(thisObject, thisObject->byteSize());
+        visitor.reportExtraMemoryUsage(thisObject->byteSize());
         break;
     }
  • trunk/Source/JavaScriptCore/runtime/JSPropertyNameIterator.h

    (r161540 → r161557)
     }
 
-    inline void StructureRareData::setEnumerationCache(VM& vm, const Structure*, JSPropertyNameIterator* value)
+    inline void StructureRareData::setEnumerationCache(VM& vm, const Structure* owner, JSPropertyNameIterator* value)
     {
-        m_enumerationCache.set(vm, this, value);
+        m_enumerationCache.set(vm, owner, value);
     }
 
  • trunk/Source/JavaScriptCore/runtime/JSString.cpp

    (r161540 → r161557)
         StringImpl* impl = thisObject->m_value.impl();
         ASSERT(impl);
-        visitor.reportExtraMemoryUsage(thisObject, impl->costDuringGC());
+        visitor.reportExtraMemoryUsage(impl->costDuringGC());
     }
 }
  • trunk/Source/JavaScriptCore/runtime/StructureRareDataInlines.h

    (r161540 → r161557)
 }
 
-inline void StructureRareData::setPreviousID(VM& vm, Structure*, Structure* structure)
+inline void StructureRareData::setPreviousID(VM& vm, Structure* transition, Structure* structure)
 {
-    m_previous.set(vm, this, structure);
+    m_previous.set(vm, transition, structure);
 }
 
…
 }
 
-inline void StructureRareData::setObjectToStringValue(VM& vm, const JSCell*, JSString* value)
+inline void StructureRareData::setObjectToStringValue(VM& vm, const JSCell* owner, JSString* value)
 {
-    m_objectToStringValue.set(vm, this, value);
+    m_objectToStringValue.set(vm, owner, value);
 }
 
  • trunk/Source/JavaScriptCore/runtime/WeakMapData.cpp

    (r161540 → r161557)
     // This isn't exact, but it is close enough, and proportional to the actual
     // external mermory usage.
-    visitor.reportExtraMemoryUsage(thisObj, thisObj->m_map.capacity() * (sizeof(JSObject*) + sizeof(WriteBarrier<Unknown>)));
+    visitor.reportExtraMemoryUsage(thisObj->m_map.capacity() * (sizeof(JSObject*) + sizeof(WriteBarrier<Unknown>)));
 }
 
  • trunk/Source/WTF/ChangeLog

    (r161540 → r161557)
+2014-01-09  Commit Queue  <commit-queue@webkit.org>
+
+        Unreviewed, rolling out r161540.
+        http://trac.webkit.org/changeset/161540
+        https://bugs.webkit.org/show_bug.cgi?id=126704
+
+        Caused assertion failures on multiple tests (Requested by ap
+        on #webkit).
+
+        * wtf/Bitmap.h:
+        (WTF::WordType>::count):
+
 2014-01-07  Mark Hahnenberg  <mhahnenberg@apple.com>
 
  • trunk/Source/WTF/wtf/Bitmap.h

    (r161540 → r161557)
     }
     for (size_t i = start / wordSize; i < words; ++i)
-        result += WTF::bitCount(static_cast<unsigned>(bits[i]));
+        result += WTF::bitCount(bits[i]);
     return result;
 }