Changeset 161557 in WebKit
- Timestamp:
- Jan 9, 2014 9:21:02 AM (10 years ago)
- Location:
- trunk/Source
- Files:
- 31 edited
Legend:
- Unmodified
- Added
- Removed
trunk/Source/JavaScriptCore/ChangeLog
r161554 r161557 1 2014-01-09 Commit Queue <commit-queue@webkit.org> 2 3 Unreviewed, rolling out r161540. 4 http://trac.webkit.org/changeset/161540 5 https://bugs.webkit.org/show_bug.cgi?id=126704 6 7 Caused assertion failures on multiple tests (Requested by ap 8 on #webkit). 9 10 * bytecode/CodeBlock.cpp: 11 (JSC::CodeBlock::visitAggregate): 12 * bytecode/CodeBlock.h: 13 (JSC::CodeBlockSet::mark): 14 * dfg/DFGOperations.cpp: 15 * heap/CodeBlockSet.cpp: 16 (JSC::CodeBlockSet::add): 17 (JSC::CodeBlockSet::traceMarked): 18 * heap/CodeBlockSet.h: 19 * heap/CopiedBlockInlines.h: 20 (JSC::CopiedBlock::reportLiveBytes): 21 * heap/CopiedSpace.cpp: 22 * heap/CopiedSpace.h: 23 * heap/Heap.cpp: 24 (JSC::Heap::Heap): 25 (JSC::Heap::didAbandon): 26 (JSC::Heap::markRoots): 27 (JSC::Heap::copyBackingStores): 28 (JSC::Heap::collectAllGarbage): 29 (JSC::Heap::collect): 30 (JSC::Heap::didAllocate): 31 * heap/Heap.h: 32 (JSC::Heap::shouldCollect): 33 (JSC::Heap::isCollecting): 34 (JSC::Heap::isWriteBarrierEnabled): 35 (JSC::Heap::writeBarrier): 36 * heap/HeapOperation.h: 37 * heap/MarkStack.cpp: 38 (JSC::MarkStackArray::~MarkStackArray): 39 * heap/MarkStack.h: 40 * heap/MarkedAllocator.cpp: 41 (JSC::MarkedAllocator::isPagedOut): 42 (JSC::MarkedAllocator::tryAllocateHelper): 43 (JSC::MarkedAllocator::addBlock): 44 (JSC::MarkedAllocator::removeBlock): 45 * heap/MarkedAllocator.h: 46 (JSC::MarkedAllocator::MarkedAllocator): 47 (JSC::MarkedAllocator::reset): 48 * heap/MarkedBlock.cpp: 49 * heap/MarkedBlock.h: 50 (JSC::MarkedBlock::lastChanceToFinalize): 51 (JSC::MarkedBlock::didConsumeEmptyFreeList): 52 (JSC::MarkedBlock::clearMarks): 53 * heap/MarkedSpace.cpp: 54 (JSC::MarkedSpace::~MarkedSpace): 55 (JSC::MarkedSpace::resetAllocators): 56 (JSC::MarkedSpace::visitWeakSets): 57 (JSC::MarkedSpace::reapWeakSets): 58 * heap/MarkedSpace.h: 59 (JSC::ClearMarks::operator()): 60 (JSC::MarkedSpace::clearMarks): 61 * heap/SlotVisitor.cpp: 62 (JSC::SlotVisitor::~SlotVisitor): 63 * 
heap/SlotVisitor.h: 64 (JSC::SlotVisitor::sharedData): 65 * heap/SlotVisitorInlines.h: 66 (JSC::SlotVisitor::internalAppend): 67 (JSC::SlotVisitor::copyLater): 68 (JSC::SlotVisitor::reportExtraMemoryUsage): 69 * jit/Repatch.cpp: 70 * runtime/JSGenericTypedArrayViewInlines.h: 71 (JSC::JSGenericTypedArrayView<Adaptor>::visitChildren): 72 * runtime/JSPropertyNameIterator.h: 73 (JSC::StructureRareData::setEnumerationCache): 74 * runtime/JSString.cpp: 75 (JSC::JSString::visitChildren): 76 * runtime/StructureRareDataInlines.h: 77 (JSC::StructureRareData::setPreviousID): 78 (JSC::StructureRareData::setObjectToStringValue): 79 * runtime/WeakMapData.cpp: 80 (JSC::WeakMapData::visitChildren): 81 1 82 2014-01-09 Andreas Kling <akling@apple.com> 2 83 -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
r161540 r161557 1955 1955 otherBlock->visitAggregate(visitor); 1956 1956 1957 visitor.reportExtraMemoryUsage( ownerExecutable(),sizeof(CodeBlock));1957 visitor.reportExtraMemoryUsage(sizeof(CodeBlock)); 1958 1958 if (m_jitCode) 1959 visitor.reportExtraMemoryUsage( ownerExecutable(),m_jitCode->size());1959 visitor.reportExtraMemoryUsage(m_jitCode->size()); 1960 1960 if (m_instructions.size()) { 1961 1961 // Divide by refCount() because m_instructions points to something that is shared … … 1963 1963 // Having each CodeBlock report only its proportional share of the size is one way 1964 1964 // of accomplishing this. 1965 visitor.reportExtraMemoryUsage( ownerExecutable(),m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());1965 visitor.reportExtraMemoryUsage(m_instructions.size() * sizeof(Instruction) / m_instructions.refCount()); 1966 1966 } 1967 1967 -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.h
r161540 r161557 1270 1270 1271 1271 (*iter)->m_mayBeExecuting = true; 1272 m_currentlyExecuting.append(static_cast<CodeBlock*>(candidateCodeBlock));1273 1272 } 1274 1273 -
trunk/Source/JavaScriptCore/dfg/DFGOperations.cpp
r161540 r161557 851 851 852 852 ASSERT(!object->structure()->outOfLineCapacity()); 853 DeferGC deferGC(vm.heap);854 853 Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity); 855 854 object->setButterflyWithoutChangingStructure(vm, result); … … 862 861 NativeCallFrameTracer tracer(&vm, exec); 863 862 864 DeferGC deferGC(vm.heap);865 863 Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize); 866 864 object->setButterflyWithoutChangingStructure(vm, result); -
trunk/Source/JavaScriptCore/heap/CodeBlockSet.cpp
r161540 r161557 46 46 void CodeBlockSet::add(PassRefPtr<CodeBlock> codeBlock) 47 47 { 48 CodeBlock* block = codeBlock.leakRef(); 49 bool isNewEntry = m_set.add(block).isNewEntry; 48 bool isNewEntry = m_set.add(codeBlock.leakRef()).isNewEntry; 50 49 ASSERT_UNUSED(isNewEntry, isNewEntry); 51 50 } … … 103 102 if (!codeBlock->m_mayBeExecuting) 104 103 continue; 105 codeBlock-> ownerExecutable()->visitChildren(codeBlock->ownerExecutable(),visitor);104 codeBlock->visitAggregate(visitor); 106 105 } 107 }108 109 void CodeBlockSet::rememberCurrentlyExecutingCodeBlocks(Heap* heap)110 {111 for (size_t i = 0; i < m_currentlyExecuting.size(); ++i)112 heap->addToRememberedSet(m_currentlyExecuting[i]->ownerExecutable());113 m_currentlyExecuting.clear();114 106 } 115 107 -
trunk/Source/JavaScriptCore/heap/CodeBlockSet.h
r161540 r161557 31 31 #include <wtf/PassRefPtr.h> 32 32 #include <wtf/RefPtr.h> 33 #include <wtf/Vector.h>34 33 35 34 namespace JSC { 36 35 37 36 class CodeBlock; 38 class Heap;39 37 class SlotVisitor; 40 38 … … 68 66 void traceMarked(SlotVisitor&); 69 67 70 // Add all currently executing CodeBlocks to the remembered set to be71 // re-scanned during the next collection.72 void rememberCurrentlyExecutingCodeBlocks(Heap*);73 74 68 private: 75 69 // This is not a set of RefPtr<CodeBlock> because we need to be able to find … … 77 71 // and all, but that seemed like overkill. 78 72 HashSet<CodeBlock* > m_set; 79 Vector<CodeBlock*> m_currentlyExecuting;80 73 }; 81 74 -
trunk/Source/JavaScriptCore/heap/CopiedBlockInlines.h
r161540 r161557 43 43 m_liveBytes += bytes; 44 44 45 if (isPinned())46 return;47 48 45 if (!shouldEvacuate()) { 49 46 pin(); -
trunk/Source/JavaScriptCore/heap/CopiedSpace.cpp
r161540 r161557 317 317 } 318 318 319 void CopiedSpace::didStartFullCollection()320 {321 ASSERT(heap()->operationInProgress() == FullCollection);322 323 ASSERT(m_fromSpace->isEmpty());324 325 for (CopiedBlock* block = m_toSpace->head(); block; block = block->next())326 block->didSurviveGC();327 328 for (CopiedBlock* block = m_oversizeBlocks.head(); block; block = block->next())329 block->didSurviveGC();330 }331 332 319 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/CopiedSpace.h
r161540 r161557 61 61 CopiedAllocator& allocator() { return m_allocator; } 62 62 63 void didStartFullCollection();64 65 63 void startedCopying(); 66 64 void doneCopying(); … … 82 80 83 81 static CopiedBlock* blockFor(void*); 84 85 Heap* heap() const { return m_heap; }86 82 87 83 private: -
trunk/Source/JavaScriptCore/heap/Heap.cpp
r161544 r161557 254 254 , m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize)) 255 255 , m_sizeAfterLastCollect(0) 256 , m_bytesAllocatedThisCycle(0) 257 , m_bytesAbandonedThisCycle(0) 258 , m_maxEdenSize(m_minBytesPerCycle) 259 , m_maxHeapSize(m_minBytesPerCycle) 260 , m_shouldDoFullCollection(false) 256 , m_bytesAllocatedLimit(m_minBytesPerCycle) 257 , m_bytesAllocated(0) 258 , m_bytesAbandoned(0) 261 259 , m_totalBytesVisited(0) 262 260 , m_totalBytesCopied(0) … … 272 270 , m_handleSet(vm) 273 271 , m_isSafeToCollect(false) 274 , m_writeBarrierBuffer( 256)272 , m_writeBarrierBuffer(128) 275 273 , m_vm(vm) 276 274 , m_lastGCLength(0) … … 335 333 { 336 334 if (m_activityCallback) 337 m_activityCallback->didAllocate(m_bytesAllocated ThisCycle + m_bytesAbandonedThisCycle);338 m_bytesAbandoned ThisCycle+= bytes;335 m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned); 336 m_bytesAbandoned += bytes; 339 337 } 340 338 … … 489 487 visitor.setup(); 490 488 HeapRootVisitor heapRootVisitor(visitor); 491 492 Vector<const JSCell*> rememberedSet(m_slotVisitor.markStack().size());493 m_slotVisitor.markStack().fillVector(rememberedSet);494 489 495 490 { … … 596 591 } 597 592 598 {599 GCPHASE(ClearRememberedSet);600 for (unsigned i = 0; i < rememberedSet.size(); ++i) {601 const JSCell* cell = rememberedSet[i];602 MarkedBlock::blockFor(cell)->clearRemembered(cell);603 }604 }605 606 593 GCCOUNTER(VisitedValueCount, visitor.visitCount()); 607 594 … … 615 602 #endif 616 603 617 if (m_operationInProgress == EdenCollection) { 618 m_totalBytesVisited += visitor.bytesVisited(); 619 m_totalBytesCopied += visitor.bytesCopied(); 620 } else { 621 ASSERT(m_operationInProgress == FullCollection); 622 m_totalBytesVisited = visitor.bytesVisited(); 623 m_totalBytesCopied = visitor.bytesCopied(); 624 } 604 m_totalBytesVisited = visitor.bytesVisited(); 605 m_totalBytesCopied = visitor.bytesCopied(); 625 606 #if ENABLE(PARALLEL_GC) 626 607 m_totalBytesVisited += 
m_sharedData.childBytesVisited(); … … 635 616 } 636 617 637 template <HeapOperation collectionType>638 618 void Heap::copyBackingStores() 639 619 { 640 if (collectionType == EdenCollection)641 return;642 643 620 m_storageSpace.startedCopying(); 644 621 if (m_storageSpace.shouldDoCopyPhase()) { … … 651 628 m_storageSpace.doneCopying(); 652 629 m_sharedData.didFinishCopying(); 653 } else 630 } else 654 631 m_storageSpace.doneCopying(); 655 632 } … … 747 724 } 748 725 749 void Heap::addToRememberedSet(const JSCell* cell)750 {751 ASSERT(cell);752 ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());753 ASSERT(isMarked(cell));754 if (isInRememberedSet(cell))755 return;756 MarkedBlock::blockFor(cell)->setRemembered(cell);757 m_slotVisitor.unconditionallyAppend(const_cast<JSCell*>(cell));758 }759 760 726 void Heap::collectAllGarbage() 761 727 { … … 763 729 return; 764 730 765 m_shouldDoFullCollection = true;766 731 collect(); 767 732 … … 800 765 m_vm->prepareToDiscardCode(); 801 766 } 802 803 bool isFullCollection = m_shouldDoFullCollection; 804 if (isFullCollection) { 805 m_operationInProgress = FullCollection; 806 m_slotVisitor.clearMarkStack(); 807 m_shouldDoFullCollection = false; 808 if (Options::logGC()) 809 dataLog("FullCollection, "); 810 } else { 811 #if ENABLE(GGC) 812 m_operationInProgress = EdenCollection; 813 if (Options::logGC()) 814 dataLog("EdenCollection, "); 815 #else 816 m_operationInProgress = FullCollection; 817 m_slotVisitor.clearMarkStack(); 818 if (Options::logGC()) 819 dataLog("FullCollection, "); 820 #endif 821 } 822 if (m_operationInProgress == FullCollection) 823 m_extraMemoryUsage = 0; 767 768 m_operationInProgress = Collection; 769 m_extraMemoryUsage = 0; 824 770 825 771 if (m_activityCallback) … … 835 781 GCPHASE(StopAllocation); 836 782 m_objectSpace.stopAllocating(); 837 if (m_operationInProgress == FullCollection)838 m_storageSpace.didStartFullCollection();839 }840 841 {842 GCPHASE(FlushWriteBarrierBuffer);843 if 
(m_operationInProgress == EdenCollection)844 m_writeBarrierBuffer.flush(*this);845 else846 m_writeBarrierBuffer.reset();847 783 } 848 784 … … 861 797 } 862 798 863 if (m_operationInProgress == FullCollection){799 { 864 800 m_blockSnapshot.resize(m_objectSpace.blocks().set().size()); 865 801 MarkedBlockSnapshotFunctor functor(m_blockSnapshot); … … 867 803 } 868 804 869 if (m_operationInProgress == FullCollection) 870 copyBackingStores<FullCollection>(); 871 else 872 copyBackingStores<EdenCollection>(); 805 copyBackingStores(); 873 806 874 807 { … … 887 820 } 888 821 889 if (m_operationInProgress == FullCollection) 890 m_sweeper->startSweeping(m_blockSnapshot); 891 892 { 893 GCPHASE(AddCurrentlyExecutingCodeBlocksToRememberedSet); 894 m_codeBlocks.rememberCurrentlyExecutingCodeBlocks(this); 895 } 896 897 m_bytesAbandonedThisCycle = 0; 822 m_sweeper->startSweeping(m_blockSnapshot); 823 m_bytesAbandoned = 0; 898 824 899 825 { … … 906 832 HeapStatistics::exitWithFailure(); 907 833 908 if (m_operationInProgress == FullCollection) {909 // To avoid pathological GC churn in very small and very large heaps, we set910 // the new allocation limit based on the current size of the heap, with a911 // fixed minimum.912 m_maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));913 m_maxEdenSize = m_maxHeapSize - currentHeapSize;914 } else {915 ASSERT(currentHeapSize >= m_sizeAfterLastCollect);916 m_maxEdenSize = m_maxHeapSize - currentHeapSize;917 double edenToOldGenerationRatio = (double)m_maxEdenSize / (double)m_maxHeapSize;918 double minEdenToOldGenerationRatio = 1.0 / 3.0;919 if (edenToOldGenerationRatio < minEdenToOldGenerationRatio)920 m_shouldDoFullCollection = true;921 m_maxHeapSize += currentHeapSize - m_sizeAfterLastCollect;922 m_maxEdenSize = m_maxHeapSize - currentHeapSize;923 }924 925 834 m_sizeAfterLastCollect = currentHeapSize; 926 835 927 m_bytesAllocatedThisCycle = 0; 836 // To avoid pathological GC churn in very 
small and very large heaps, we set 837 // the new allocation limit based on the current size of the heap, with a 838 // fixed minimum. 839 size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize)); 840 m_bytesAllocatedLimit = maxHeapSize - currentHeapSize; 841 842 m_bytesAllocated = 0; 928 843 double lastGCEndTime = WTF::monotonicallyIncreasingTime(); 929 844 m_lastGCLength = lastGCEndTime - lastGCStartTime; … … 931 846 if (Options::recordGCPauseTimes()) 932 847 HeapStatistics::recordGCPauseTime(lastGCStartTime, lastGCEndTime); 933 RELEASE_ASSERT(m_operationInProgress == EdenCollection || m_operationInProgress == FullCollection);848 RELEASE_ASSERT(m_operationInProgress == Collection); 934 849 935 850 m_operationInProgress = NoOperation; … … 949 864 dataLog(after - before, " ms, ", currentHeapSize / 1024, " kb]\n"); 950 865 } 866 867 #if ENABLE(ALLOCATION_LOGGING) 868 dataLogF("JSC GC finishing collection.\n"); 869 #endif 951 870 } 952 871 … … 998 917 { 999 918 if (m_activityCallback) 1000 m_activityCallback->didAllocate(m_bytesAllocated ThisCycle + m_bytesAbandonedThisCycle);1001 m_bytesAllocated ThisCycle+= bytes;919 m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned); 920 m_bytesAllocated += bytes; 1002 921 } 1003 922 … … 1074 993 decrementDeferralDepth(); 1075 994 collectIfNecessaryOrDefer(); 1076 }1077 1078 void Heap::writeBarrier(const JSCell* from)1079 {1080 ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));1081 if (!from || !isMarked(from))1082 return;1083 Heap* heap = Heap::heap(from);1084 heap->addToRememberedSet(from);1085 995 } 1086 996 -
trunk/Source/JavaScriptCore/heap/Heap.h
r161540 r161557 95 95 static void setMarked(const void*); 96 96 97 JS_EXPORT_PRIVATE void addToRememberedSet(const JSCell*);98 bool isInRememberedSet(const JSCell* cell) const99 {100 ASSERT(cell);101 ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());102 return MarkedBlock::blockFor(cell)->isRemembered(cell);103 }104 97 static bool isWriteBarrierEnabled(); 105 JS_EXPORT_PRIVATEstatic void writeBarrier(const JSCell*);98 static void writeBarrier(const JSCell*); 106 99 static void writeBarrier(const JSCell*, JSValue); 107 100 static void writeBarrier(const JSCell*, JSCell*); 101 static uint8_t* addressOfCardFor(JSCell*); 108 102 109 103 WriteBarrierBuffer& writeBarrierBuffer() { return m_writeBarrierBuffer; } … … 127 121 // true if collection is in progress 128 122 inline bool isCollecting(); 129 inline HeapOperation operationInProgress() { return m_operationInProgress; }130 123 // true if an allocation or collection is in progress 131 124 inline bool isBusy(); … … 244 237 void markProtectedObjects(HeapRootVisitor&); 245 238 void markTempSortVectors(HeapRootVisitor&); 246 template <HeapOperation collectionType>247 239 void copyBackingStores(); 248 240 void harvestWeakReferences(); … … 266 258 size_t m_sizeAfterLastCollect; 267 259 268 size_t m_bytesAllocatedThisCycle; 269 size_t m_bytesAbandonedThisCycle; 270 size_t m_maxEdenSize; 271 size_t m_maxHeapSize; 272 bool m_shouldDoFullCollection; 260 size_t m_bytesAllocatedLimit; 261 size_t m_bytesAllocated; 262 size_t m_bytesAbandoned; 263 273 264 size_t m_totalBytesVisited; 274 265 size_t m_totalBytesCopied; … … 281 272 size_t m_extraMemoryUsage; 282 273 283 HashSet<const JSCell*> m_copyingRememberedSet;284 285 274 ProtectCountSet m_protectedValues; 286 275 Vector<Vector<ValueStringPair, 0, UnsafeVectorOverflow>* > m_tempSortingVectors; … … 334 323 return false; 335 324 if (Options::gcMaxHeapSize()) 336 return m_bytesAllocated ThisCycle> Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress 
== NoOperation;337 return m_bytesAllocated ThisCycle > m_maxEdenSize&& m_isSafeToCollect && m_operationInProgress == NoOperation;325 return m_bytesAllocated > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation; 326 return m_bytesAllocated > m_bytesAllocatedLimit && m_isSafeToCollect && m_operationInProgress == NoOperation; 338 327 } 339 328 … … 345 334 bool Heap::isCollecting() 346 335 { 347 return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;336 return m_operationInProgress == Collection; 348 337 } 349 338 … … 382 371 inline bool Heap::isWriteBarrierEnabled() 383 372 { 384 #if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)373 #if ENABLE(WRITE_BARRIER_PROFILING) 385 374 return true; 386 375 #else … … 389 378 } 390 379 391 inline void Heap::writeBarrier(const JSCell* from, JSCell* to) 392 { 393 #if ENABLE(WRITE_BARRIER_PROFILING) 380 inline void Heap::writeBarrier(const JSCell*) 381 { 394 382 WriteBarrierCounters::countWriteBarrier(); 395 #endif 396 if (!from || !isMarked(from)) 397 return; 398 if (!to || isMarked(to)) 399 return; 400 Heap::heap(from)->addToRememberedSet(from); 401 } 402 403 inline void Heap::writeBarrier(const JSCell* from, JSValue to) 404 { 405 #if ENABLE(WRITE_BARRIER_PROFILING) 383 } 384 385 inline void Heap::writeBarrier(const JSCell*, JSCell*) 386 { 406 387 WriteBarrierCounters::countWriteBarrier(); 407 #endif 408 if (!to.isCell()) 409 return; 410 writeBarrier(from, to.asCell()); 388 } 389 390 inline void Heap::writeBarrier(const JSCell*, JSValue) 391 { 392 WriteBarrierCounters::countWriteBarrier(); 411 393 } 412 394 -
trunk/Source/JavaScriptCore/heap/HeapOperation.h
r161540 r161557 29 29 namespace JSC { 30 30 31 enum HeapOperation { NoOperation, Allocation, FullCollection, EdenCollection };31 enum HeapOperation { NoOperation, Allocation, Collection }; 32 32 33 33 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/MarkStack.cpp
r161540 r161557 58 58 MarkStackArray::~MarkStackArray() 59 59 { 60 ASSERT(m_numberOfSegments == 1); 61 ASSERT(m_segments.size() == 1); 60 ASSERT(m_numberOfSegments == 1 && m_segments.size() == 1); 62 61 m_blockAllocator.deallocate(MarkStackSegment::destroy(m_segments.removeHead())); 63 m_numberOfSegments--;64 ASSERT(!m_numberOfSegments);65 ASSERT(!m_segments.size());66 }67 68 void MarkStackArray::clear()69 {70 if (!m_segments.head())71 return;72 MarkStackSegment* next;73 for (MarkStackSegment* current = m_segments.head(); current->next(); current = next) {74 next = current->next();75 m_segments.remove(current);76 m_blockAllocator.deallocate(MarkStackSegment::destroy(current));77 }78 m_top = 0;79 m_numberOfSegments = 1;80 #if !ASSERT_DISABLED81 m_segments.head()->m_top = 0;82 #endif83 62 } 84 63 … … 189 168 } 190 169 191 void MarkStackArray::fillVector(Vector<const JSCell*>& vector)192 {193 ASSERT(vector.size() == size());194 195 MarkStackSegment* currentSegment = m_segments.head();196 if (!currentSegment)197 return;198 199 unsigned count = 0;200 for (unsigned i = 0; i < m_top; ++i) {201 ASSERT(currentSegment->data()[i]);202 vector[count++] = currentSegment->data()[i];203 }204 205 currentSegment = currentSegment->next();206 while (currentSegment) {207 for (unsigned i = 0; i < s_segmentCapacity; ++i) {208 ASSERT(currentSegment->data()[i]);209 vector[count++] = currentSegment->data()[i];210 }211 currentSegment = currentSegment->next();212 }213 }214 215 170 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/MarkStack.h
r161540 r161557 53 53 #include "HeapBlock.h" 54 54 #include <wtf/StdLibExtras.h> 55 #include <wtf/Vector.h>56 55 57 56 namespace JSC { … … 102 101 bool isEmpty(); 103 102 104 void fillVector(Vector<const JSCell*>&);105 void clear();106 107 103 private: 108 104 template <size_t size> struct CapacityFromSize { -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.cpp
r161540 r161557 11 11 namespace JSC { 12 12 13 static bool isListPagedOut(double deadline, DoublyLinkedList<MarkedBlock>& list)13 bool MarkedAllocator::isPagedOut(double deadline) 14 14 { 15 15 unsigned itersSinceLastTimeCheck = 0; 16 MarkedBlock* block = list.head();16 MarkedBlock* block = m_blockList.head(); 17 17 while (block) { 18 18 block = block->next(); … … 25 25 } 26 26 } 27 return false;28 }29 27 30 bool MarkedAllocator::isPagedOut(double deadline)31 {32 if (isListPagedOut(deadline, m_blockList))33 return true;34 28 return false; 35 29 } … … 43 37 DelayedReleaseScope delayedReleaseScope(*m_markedSpace); 44 38 if (m_currentBlock) { 45 ASSERT(m_currentBlock == m_ nextBlockToSweep);39 ASSERT(m_currentBlock == m_blocksToSweep); 46 40 m_currentBlock->didConsumeFreeList(); 47 m_ nextBlockToSweep = m_currentBlock->next();41 m_blocksToSweep = m_currentBlock->next(); 48 42 } 49 43 50 MarkedBlock* next; 51 for (MarkedBlock*& block = m_nextBlockToSweep; block; block = next) { 52 next = block->next(); 53 44 for (MarkedBlock*& block = m_blocksToSweep; block; block = block->next()) { 54 45 MarkedBlock::FreeList freeList = block->sweep(MarkedBlock::SweepToFreeList); 55 56 46 if (!freeList.head) { 57 47 block->didConsumeEmptyFreeList(); 58 m_blockList.remove(block);59 m_blockList.push(block);60 if (!m_lastFullBlock)61 m_lastFullBlock = block;62 48 continue; 63 49 } … … 83 69 m_freeList.head = head->next; 84 70 ASSERT(head); 85 m_markedSpace->didAllocateInBlock(m_currentBlock);86 71 return head; 87 72 } … … 152 137 153 138 m_blockList.append(block); 154 m_ nextBlockToSweep = m_currentBlock = block;139 m_blocksToSweep = m_currentBlock = block; 155 140 m_freeList = block->sweep(MarkedBlock::SweepToFreeList); 156 141 m_markedSpace->didAddBlock(block); … … 163 148 m_freeList = MarkedBlock::FreeList(); 164 149 } 165 if (m_nextBlockToSweep == block) 166 m_nextBlockToSweep = m_nextBlockToSweep->next(); 167 168 if (block == m_lastFullBlock) 169 m_lastFullBlock = 
m_lastFullBlock->prev(); 170 150 if (m_blocksToSweep == block) 151 m_blocksToSweep = m_blocksToSweep->next(); 171 152 m_blockList.remove(block); 172 153 } 173 154 174 void MarkedAllocator::reset()175 {176 m_lastActiveBlock = 0;177 m_currentBlock = 0;178 m_freeList = MarkedBlock::FreeList();179 if (m_heap->operationInProgress() == FullCollection)180 m_lastFullBlock = 0;181 182 if (m_lastFullBlock)183 m_nextBlockToSweep = m_lastFullBlock->next() ? m_lastFullBlock->next() : m_lastFullBlock;184 else185 m_nextBlockToSweep = m_blockList.head();186 }187 188 155 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.h
r161540 r161557 53 53 MarkedBlock* m_currentBlock; 54 54 MarkedBlock* m_lastActiveBlock; 55 MarkedBlock* m_nextBlockToSweep; 56 MarkedBlock* m_lastFullBlock; 55 MarkedBlock* m_blocksToSweep; 57 56 DoublyLinkedList<MarkedBlock> m_blockList; 58 57 size_t m_cellSize; … … 70 69 : m_currentBlock(0) 71 70 , m_lastActiveBlock(0) 72 , m_nextBlockToSweep(0) 73 , m_lastFullBlock(0) 71 , m_blocksToSweep(0) 74 72 , m_cellSize(0) 75 73 , m_destructorType(MarkedBlock::None) … … 103 101 #endif 104 102 return head; 103 } 104 105 inline void MarkedAllocator::reset() 106 { 107 m_lastActiveBlock = 0; 108 m_currentBlock = 0; 109 m_freeList = MarkedBlock::FreeList(); 110 m_blocksToSweep = m_blockList.head(); 105 111 } 106 112 -
trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp
r161540 r161557 198 198 } 199 199 200 void MarkedBlock::clearMarks()201 {202 if (heap()->operationInProgress() == JSC::EdenCollection)203 this->clearMarksWithCollectionType<EdenCollection>();204 else205 this->clearMarksWithCollectionType<FullCollection>();206 }207 208 void MarkedBlock::clearRememberedSet()209 {210 m_rememberedSet.clearAll();211 }212 213 template <HeapOperation collectionType>214 void MarkedBlock::clearMarksWithCollectionType()215 {216 ASSERT(collectionType == FullCollection || collectionType == EdenCollection);217 HEAP_LOG_BLOCK_STATE_TRANSITION(this);218 219 ASSERT(m_state != New && m_state != FreeListed);220 if (collectionType == FullCollection) {221 m_marks.clearAll();222 m_rememberedSet.clearAll();223 }224 225 // This will become true at the end of the mark phase. We set it now to226 // avoid an extra pass to do so later.227 m_state = Marked;228 }229 230 void MarkedBlock::lastChanceToFinalize()231 {232 m_weakSet.lastChanceToFinalize();233 234 clearNewlyAllocated();235 clearMarksWithCollectionType<FullCollection>();236 sweep();237 }238 239 200 MarkedBlock::FreeList MarkedBlock::resumeAllocating() 240 201 { -
trunk/Source/JavaScriptCore/heap/MarkedBlock.h
r161540 r161557 26 26 #include "HeapBlock.h" 27 27 28 #include "HeapOperation.h"29 28 #include "WeakSet.h" 30 29 #include <wtf/Bitmap.h> … … 74 73 75 74 public: 76 static const size_t atomSize = 16; // bytes75 static const size_t atomSize = 8; // bytes 77 76 static const size_t atomShiftAmount = 4; // log_2(atomSize) FIXME: Change atomSize to 16. 78 77 static const size_t blockSize = 64 * KB; … … 142 141 FreeList resumeAllocating(); // Call this if you canonicalized a block for some non-collection related purpose. 143 142 void didConsumeEmptyFreeList(); // Call this if you sweep a block, but the returned FreeList is empty. 144 void didSweepToNoAvail(); // Call this if you sweep a block and get an empty free list back.145 143 146 144 // Returns true if the "newly allocated" bitmap was non-null … … 148 146 bool clearNewlyAllocated(); 149 147 void clearMarks(); 150 void clearRememberedSet();151 template <HeapOperation collectionType>152 void clearMarksWithCollectionType();153 154 148 size_t markCount(); 155 149 bool isEmpty(); … … 167 161 void setMarked(const void*); 168 162 void clearMarked(const void*); 169 170 void setRemembered(const void*);171 void clearRemembered(const void*);172 void atomicClearRemembered(const void*);173 bool isRemembered(const void*);174 163 175 164 bool isNewlyAllocated(const void*); … … 202 191 size_t m_endAtom; // This is a fuzzy end. Always test for < m_endAtom. 
203 192 #if ENABLE(PARALLEL_GC) 204 WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic, uint8_t> m_marks; 205 WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic, uint8_t> m_rememberedSet; 193 WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic> m_marks; 206 194 #else 207 WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic, uint8_t> m_marks; 208 WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic, uint8_t> m_rememberedSet; 195 WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic> m_marks; 209 196 #endif 210 197 OwnPtr<WTF::Bitmap<atomsPerBlock>> m_newlyAllocated; … … 248 235 } 249 236 237 inline void MarkedBlock::lastChanceToFinalize() 238 { 239 m_weakSet.lastChanceToFinalize(); 240 241 clearNewlyAllocated(); 242 clearMarks(); 243 sweep(); 244 } 245 250 246 inline MarkedAllocator* MarkedBlock::allocator() const 251 247 { … … 296 292 297 293 ASSERT(!m_newlyAllocated); 294 #ifndef NDEBUG 295 for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) 296 ASSERT(m_marks.get(i)); 297 #endif 298 298 ASSERT(m_state == FreeListed); 299 299 m_state = Marked; 300 300 } 301 301 302 inline void MarkedBlock::clearMarks() 303 { 304 HEAP_LOG_BLOCK_STATE_TRANSITION(this); 305 306 ASSERT(m_state != New && m_state != FreeListed); 307 m_marks.clearAll(); 308 309 // This will become true at the end of the mark phase. We set it now to 310 // avoid an extra pass to do so later. 
311 m_state = Marked; 312 } 313 302 314 inline size_t MarkedBlock::markCount() 303 315 { … … 333 345 { 334 346 return (reinterpret_cast<Bits>(p) - reinterpret_cast<Bits>(this)) / atomSize; 335 }336 337 inline void MarkedBlock::setRemembered(const void* p)338 {339 m_rememberedSet.set(atomNumber(p));340 }341 342 inline void MarkedBlock::clearRemembered(const void* p)343 {344 m_rememberedSet.clear(atomNumber(p));345 }346 347 inline void MarkedBlock::atomicClearRemembered(const void* p)348 {349 m_rememberedSet.concurrentTestAndClear(atomNumber(p));350 }351 352 inline bool MarkedBlock::isRemembered(const void* p)353 {354 return m_rememberedSet.get(atomNumber(p));355 347 } 356 348 -
trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp
r161540 r161557 106 106 Free free(Free::FreeAll, this); 107 107 forEachBlock(free); 108 ASSERT(!m_blocks.set().size());109 108 } 110 109 … … 145 144 m_normalDestructorSpace.largeAllocator.reset(); 146 145 m_immortalStructureDestructorSpace.largeAllocator.reset(); 147 148 m_blocksWithNewObjects.clear();149 146 } 150 147 … … 152 149 { 153 150 VisitWeakSet visitWeakSet(heapRootVisitor); 154 if (m_heap->operationInProgress() == EdenCollection) { 155 for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i) 156 visitWeakSet(m_blocksWithNewObjects[i]); 157 } else 158 forEachBlock(visitWeakSet); 151 forEachBlock(visitWeakSet); 159 152 } 160 153 161 154 void MarkedSpace::reapWeakSets() 162 155 { 163 if (m_heap->operationInProgress() == EdenCollection) { 164 for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i) 165 m_blocksWithNewObjects[i]->reapWeakSet(); 166 } else 167 forEachBlock<ReapWeakSet>(); 156 forEachBlock<ReapWeakSet>(); 168 157 } 169 158 … … 317 306 } 318 307 319 #ifndef NDEBUG320 struct VerifyMarked : MarkedBlock::VoidFunctor {321 void operator()(MarkedBlock* block) { ASSERT(block->needsSweeping()); }322 };323 #endif324 325 void MarkedSpace::clearMarks()326 {327 if (m_heap->operationInProgress() == EdenCollection) {328 for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)329 m_blocksWithNewObjects[i]->clearMarks();330 } else331 forEachBlock<ClearMarks>();332 #ifndef NDEBUG333 forEachBlock<VerifyMarked>();334 #endif335 }336 337 308 void MarkedSpace::willStartIterating() 338 309 { -
trunk/Source/JavaScriptCore/heap/MarkedSpace.h
r161540 r161557 47 47 48 48 struct ClearMarks : MarkedBlock::VoidFunctor { 49 void operator()(MarkedBlock* block) 50 { 51 block->clearMarks(); 52 } 53 }; 54 55 struct ClearRememberedSet : MarkedBlock::VoidFunctor { 56 void operator()(MarkedBlock* block) 57 { 58 block->clearRememberedSet(); 59 } 49 void operator()(MarkedBlock* block) { block->clearMarks(); } 60 50 }; 61 51 … … 116 106 void didAddBlock(MarkedBlock*); 117 107 void didConsumeFreeList(MarkedBlock*); 118 void didAllocateInBlock(MarkedBlock*);119 108 120 109 void clearMarks(); 121 void clearRememberedSet();122 110 void clearNewlyAllocated(); 123 111 void sweep(); … … 163 151 bool m_isIterating; 164 152 MarkedBlockSet m_blocks; 165 Vector<MarkedBlock*> m_blocksWithNewObjects;166 153 167 154 DelayedReleaseScope* m_currentDelayedReleaseScope; … … 276 263 } 277 264 278 inline void MarkedSpace::didAllocateInBlock(MarkedBlock* block) 279 { 280 m_blocksWithNewObjects.append(block); 281 } 282 283 inline void MarkedSpace::clearRememberedSet() 284 { 285 forEachBlock<ClearRememberedSet>(); 265 inline void MarkedSpace::clearMarks() 266 { 267 forEachBlock<ClearMarks>(); 286 268 } 287 269 -
trunk/Source/JavaScriptCore/heap/SlotVisitor.cpp
r161540 r161557 34 34 SlotVisitor::~SlotVisitor() 35 35 { 36 clearMarkStack();36 ASSERT(m_stack.isEmpty()); 37 37 } 38 38 … … 62 62 m_shouldHashCons = false; 63 63 } 64 }65 66 void SlotVisitor::clearMarkStack()67 {68 m_stack.clear();69 64 } 70 65 -
trunk/Source/JavaScriptCore/heap/SlotVisitor.h
r161540 r161557 50 50 ~SlotVisitor(); 51 51 52 MarkStackArray& markStack() { return m_stack; }53 54 Heap* heap() const;55 56 52 void append(ConservativeRoots&); 57 53 … … 66 62 template<typename T> 67 63 void appendUnbarrieredWeak(Weak<T>*); 68 void unconditionallyAppend(JSCell*);69 64 70 65 void addOpaqueRoot(void*); … … 73 68 int opaqueRootCount(); 74 69 75 GCThreadSharedData& sharedData() const{ return m_shared; }70 GCThreadSharedData& sharedData() { return m_shared; } 76 71 bool isEmpty() { return m_stack.isEmpty(); } 77 72 78 73 void setup(); 79 74 void reset(); 80 void clearMarkStack();81 75 82 76 size_t bytesVisited() const { return m_bytesVisited; } … … 96 90 void copyLater(JSCell*, CopyToken, void*, size_t); 97 91 98 void reportExtraMemoryUsage( JSCell* owner, size_t);92 void reportExtraMemoryUsage(size_t size); 99 93 100 94 void addWeakReferenceHarvester(WeakReferenceHarvester*); -
trunk/Source/JavaScriptCore/heap/SlotVisitorInlines.h
r161540 r161557 106 106 MARK_LOG_CHILD(*this, cell); 107 107 108 unconditionallyAppend(cell);109 }110 111 ALWAYS_INLINE void SlotVisitor::unconditionallyAppend(JSCell* cell)112 {113 ASSERT(Heap::isMarked(cell));114 m_visitCount++;115 116 108 // Should never attempt to mark something that is zapped. 117 109 ASSERT(!cell->isZapped()); … … 227 219 { 228 220 ASSERT(bytes); 229 // We don't do any copying during EdenCollections.230 ASSERT(heap()->operationInProgress() != EdenCollection);231 232 221 m_bytesCopied += bytes; 233 222 … … 238 227 } 239 228 229 if (block->isPinned()) 230 return; 231 240 232 block->reportLiveBytes(owner, token, bytes); 241 233 } 242 234 243 inline void SlotVisitor::reportExtraMemoryUsage(JSCell* owner, size_t size) 244 { 245 // We don't want to double-count the extra memory that was reported in previous collections. 246 if (heap()->operationInProgress() == EdenCollection && MarkedBlock::blockFor(owner)->isRemembered(owner)) 247 return; 248 235 inline void SlotVisitor::reportExtraMemoryUsage(size_t size) 236 { 249 237 size_t* counter = &m_shared.m_vm->heap.m_extraMemoryUsage; 250 238 … … 260 248 } 261 249 262 inline Heap* SlotVisitor::heap() const263 {264 return &sharedData().m_vm->heap;265 }266 267 250 } // namespace JSC 268 251 -
trunk/Source/JavaScriptCore/jit/Repatch.cpp
r161540 r161557 40 40 #include "RepatchBuffer.h" 41 41 #include "ScratchRegisterAllocator.h" 42 #include "StackAlignment.h"43 42 #include "StructureRareDataInlines.h" 44 43 #include "StructureStubClearingWatchpoint.h" -
trunk/Source/JavaScriptCore/runtime/JSGenericTypedArrayViewInlines.h
r161540 r161557 448 448 449 449 case OversizeTypedArray: { 450 visitor.reportExtraMemoryUsage(thisObject , thisObject->byteSize());450 visitor.reportExtraMemoryUsage(thisObject->byteSize()); 451 451 break; 452 452 } -
trunk/Source/JavaScriptCore/runtime/JSPropertyNameIterator.h
r161540 r161557 110 110 } 111 111 112 inline void StructureRareData::setEnumerationCache(VM& vm, const Structure* , JSPropertyNameIterator* value)112 inline void StructureRareData::setEnumerationCache(VM& vm, const Structure* owner, JSPropertyNameIterator* value) 113 113 { 114 m_enumerationCache.set(vm, this, value);114 m_enumerationCache.set(vm, owner, value); 115 115 } 116 116 -
trunk/Source/JavaScriptCore/runtime/JSString.cpp
r161540 r161557 73 73 StringImpl* impl = thisObject->m_value.impl(); 74 74 ASSERT(impl); 75 visitor.reportExtraMemoryUsage( thisObject,impl->costDuringGC());75 visitor.reportExtraMemoryUsage(impl->costDuringGC()); 76 76 } 77 77 } -
trunk/Source/JavaScriptCore/runtime/StructureRareDataInlines.h
r161540 r161557 36 36 } 37 37 38 inline void StructureRareData::setPreviousID(VM& vm, Structure* , Structure* structure)38 inline void StructureRareData::setPreviousID(VM& vm, Structure* transition, Structure* structure) 39 39 { 40 m_previous.set(vm, t his, structure);40 m_previous.set(vm, transition, structure); 41 41 } 42 42 … … 51 51 } 52 52 53 inline void StructureRareData::setObjectToStringValue(VM& vm, const JSCell* , JSString* value)53 inline void StructureRareData::setObjectToStringValue(VM& vm, const JSCell* owner, JSString* value) 54 54 { 55 m_objectToStringValue.set(vm, this, value);55 m_objectToStringValue.set(vm, owner, value); 56 56 } 57 57 -
trunk/Source/JavaScriptCore/runtime/WeakMapData.cpp
r161540 r161557 65 65 // This isn't exact, but it is close enough, and proportional to the actual 66 66 // external mermory usage. 67 visitor.reportExtraMemoryUsage(thisObj , thisObj->m_map.capacity() * (sizeof(JSObject*) + sizeof(WriteBarrier<Unknown>)));67 visitor.reportExtraMemoryUsage(thisObj->m_map.capacity() * (sizeof(JSObject*) + sizeof(WriteBarrier<Unknown>))); 68 68 } 69 69 -
trunk/Source/WTF/ChangeLog
r161540 r161557 1 2014-01-09 Commit Queue <commit-queue@webkit.org> 2 3 Unreviewed, rolling out r161540. 4 http://trac.webkit.org/changeset/161540 5 https://bugs.webkit.org/show_bug.cgi?id=126704 6 7 Caused assertion failures on multiple tests (Requested by ap 8 on #webkit). 9 10 * wtf/Bitmap.h: 11 (WTF::WordType>::count): 12 1 13 2014-01-07 Mark Hahnenberg <mhahnenberg@apple.com> 2 14 -
trunk/Source/WTF/wtf/Bitmap.h
r161540 r161557 197 197 } 198 198 for (size_t i = start / wordSize; i < words; ++i) 199 result += WTF::bitCount( static_cast<unsigned>(bits[i]));199 result += WTF::bitCount(bits[i]); 200 200 return result; 201 201 }
Note: See TracChangeset
for help on using the changeset viewer.