Changeset 252452 in webkit
- Timestamp:
- Nov 14, 2019 1:37:12 AM (4 years ago)
- Location:
- trunk/Source
- Files:
-
- 1 added
- 29 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/CMakeLists.txt
r252302 r252452 570 570 heap/AllocatorForMode.h 571 571 heap/BlockDirectory.h 572 heap/BlockDirectoryBits.h 572 573 heap/BlockDirectoryInlines.h 573 574 heap/CellAttributes.h -
trunk/Source/JavaScriptCore/ChangeLog
r252422 r252452 1 2019-11-14 Yusuke Suzuki <ysuzuki@apple.com> 2 3 [JSC] BlockDirectory's bits should be compact 4 https://bugs.webkit.org/show_bug.cgi?id=204149 5 6 Reviewed by Robin Morisset. 7 8 We start applying IsoSubspace to all JSCells. This means that IsoSubspace should be small enough, 9 so that we can hold many IsoSubspaces without worrying about memory regression. 10 11 In this patch, we introduce several things to shrink sizeof(IsoSubspace) from 528 to 384. 12 13 1. Adjusting members to remove some paddings. 14 2. Remove m_heap field since this can be easily obtained from the caller. 15 3. Make MarkedSpace::heap() efficient: just doing pointer arithmetic. 16 4. Remove m_size field from IsoSubspace since BlockDirectory knows cellSize. 17 5. Introduce BlockDirectoryBits, which replaces the 9 FastBitVectors in BlockDirectory with this one class. 18 Since all the FastBitVectors have the same size, we should not have a size field for each FastBitVector. 19 We reuse FastBitVector's View mechanism to keep the same ergonomics while making BlockDirectoryBits 20 much smaller. We put 9 uint32_t as Segment, and manage Vector<Segment> in this data structure. Since 21 we touch several bits at the same time for the same block-index, this data structure is compact and 22 efficient. 
23 24 * CMakeLists.txt: 25 * JavaScriptCore.xcodeproj/project.pbxproj: 26 * heap/AlignedMemoryAllocator.cpp: 27 (JSC::AlignedMemoryAllocator::registerDirectory): 28 * heap/AlignedMemoryAllocator.h: 29 * heap/Allocator.h: 30 * heap/AllocatorInlines.h: 31 (JSC::Allocator::allocate const): 32 * heap/BlockDirectory.cpp: 33 (JSC::BlockDirectory::BlockDirectory): 34 (JSC::BlockDirectory::findEmptyBlockToSteal): 35 (JSC::BlockDirectory::findBlockForAllocation): 36 (JSC::BlockDirectory::tryAllocateBlock): 37 (JSC::BlockDirectory::addBlock): 38 (JSC::BlockDirectory::removeBlock): 39 (JSC::BlockDirectory::prepareForAllocation): 40 (JSC::BlockDirectory::beginMarkingForFullCollection): 41 (JSC::BlockDirectory::endMarking): 42 (JSC::BlockDirectory::snapshotUnsweptForEdenCollection): 43 (JSC::BlockDirectory::snapshotUnsweptForFullCollection): 44 (JSC::BlockDirectory::findBlockToSweep): 45 (JSC::BlockDirectory::sweep): 46 (JSC::BlockDirectory::shrink): 47 (JSC::BlockDirectory::assertNoUnswept): 48 (JSC::BlockDirectory::parallelNotEmptyBlockSource): 49 (JSC::BlockDirectory::dumpBits): 50 * heap/BlockDirectory.h: 51 (JSC::BlockDirectory::cellKind const): 52 (JSC::BlockDirectory::forEachBitVector): 53 (JSC::BlockDirectory::forEachBitVectorWithName): 54 (JSC::BlockDirectory::heap): Deleted. 55 * heap/BlockDirectoryBits.h: Added. 
56 (JSC::BlockDirectoryBits::BlockDirectoryBitVectorWordView::BlockDirectoryBitVectorWordView): 57 (JSC::BlockDirectoryBits::BlockDirectoryBitVectorWordView::numBits const): 58 (JSC::BlockDirectoryBits::BlockDirectoryBitVectorWordView::word const): 59 (JSC::BlockDirectoryBits::BlockDirectoryBitVectorWordView::word): 60 (JSC::BlockDirectoryBits::BlockDirectoryBitVectorWordView::clearAll): 61 (JSC::BlockDirectoryBits::BlockDirectoryBitVectorWordView::view const): 62 (JSC::BlockDirectoryBits::numBits const): 63 (JSC::BlockDirectoryBits::resize): 64 (JSC::BlockDirectoryBits::forEachSegment): 65 * heap/BlockDirectoryInlines.h: 66 (JSC::BlockDirectory::forEachBlock): 67 (JSC::BlockDirectory::forEachNotEmptyBlock): 68 * heap/CompleteSubspace.cpp: 69 (JSC::CompleteSubspace::allocatorForSlow): 70 (JSC::CompleteSubspace::tryAllocateSlow): 71 * heap/CompleteSubspaceInlines.h: 72 (JSC::CompleteSubspace::allocateNonVirtual): 73 * heap/IsoCellSet.cpp: 74 (JSC::IsoCellSet::parallelNotEmptyMarkedBlockSource): 75 * heap/IsoCellSetInlines.h: 76 (JSC::IsoCellSet::forEachMarkedCell): 77 * heap/IsoSubspace.cpp: 78 (JSC::IsoSubspace::IsoSubspace): 79 (JSC::IsoSubspace::tryAllocateFromLowerTier): 80 * heap/IsoSubspace.h: 81 (JSC::IsoSubspace::cellSize): 82 (JSC::IsoSubspace::allocatorForNonVirtual): 83 (JSC::IsoSubspace::size const): Deleted. 84 (): Deleted. 
85 * heap/IsoSubspaceInlines.h: 86 (JSC::IsoSubspace::allocateNonVirtual): 87 * heap/IsoSubspacePerVM.cpp: 88 (JSC::IsoSubspacePerVM::AutoremovingIsoSubspace::~AutoremovingIsoSubspace): 89 * heap/LocalAllocator.cpp: 90 (JSC::LocalAllocator::allocateSlowCase): 91 (JSC::LocalAllocator::doTestCollectionsIfNeeded): 92 * heap/LocalAllocator.h: 93 * heap/LocalAllocatorInlines.h: 94 (JSC::LocalAllocator::allocate): 95 * heap/MarkedBlock.cpp: 96 (JSC::MarkedBlock::Handle::dumpState): 97 * heap/MarkedSpace.cpp: 98 (JSC::MarkedSpace::MarkedSpace): 99 (JSC::MarkedSpace::sweepBlocks): 100 (JSC::MarkedSpace::prepareForAllocation): 101 (JSC::MarkedSpace::visitWeakSets): 102 (JSC::MarkedSpace::reapWeakSets): 103 (JSC::MarkedSpace::prepareForMarking): 104 (JSC::MarkedSpace::beginMarking): 105 (JSC::MarkedSpace::snapshotUnswept): 106 * heap/MarkedSpace.h: 107 (JSC::MarkedSpace::heap const): Deleted. 108 * heap/MarkedSpaceInlines.h: 109 (JSC::MarkedSpace::heap const): 110 * heap/Subspace.cpp: 111 (JSC::Subspace::initialize): 112 * heap/Subspace.h: 113 1 114 2019-11-13 Robin Morisset <rmorisset@apple.com> 2 115 -
trunk/Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj
r252310 r252452 1818 1818 E3794E761B77EB97005543AE /* ModuleAnalyzer.h in Headers */ = {isa = PBXBuildFile; fileRef = E3794E741B77EB97005543AE /* ModuleAnalyzer.h */; settings = {ATTRIBUTES = (Private, ); }; }; 1819 1819 E3850B15226ED641009ABF9C /* DFGMinifiedIDInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = E3850B14226ED63E009ABF9C /* DFGMinifiedIDInlines.h */; }; 1820 E38652E3237CA0C900E1D5EE /* BlockDirectoryBits.h in Headers */ = {isa = PBXBuildFile; fileRef = E38652E2237CA0C800E1D5EE /* BlockDirectoryBits.h */; settings = {ATTRIBUTES = (Private, ); }; }; 1820 1821 E3893A1D2203A7C600E79A74 /* AsyncFromSyncIteratorPrototype.lut.h in Headers */ = {isa = PBXBuildFile; fileRef = E3893A1C2203A7C600E79A74 /* AsyncFromSyncIteratorPrototype.lut.h */; }; 1821 1822 E38D999C221B78BB00D50474 /* JSNonDestructibleProxy.h in Headers */ = {isa = PBXBuildFile; fileRef = E38D999A221B789F00D50474 /* JSNonDestructibleProxy.h */; settings = {ATTRIBUTES = (Private, ); }; }; … … 4948 4949 E380D66B1F19249D00A59095 /* BuiltinNames.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = BuiltinNames.cpp; sourceTree = "<group>"; }; 4949 4950 E3850B14226ED63E009ABF9C /* DFGMinifiedIDInlines.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = DFGMinifiedIDInlines.h; path = dfg/DFGMinifiedIDInlines.h; sourceTree = "<group>"; }; 4951 E38652E2237CA0C800E1D5EE /* BlockDirectoryBits.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = BlockDirectoryBits.h; sourceTree = "<group>"; }; 4950 4952 E3893A1C2203A7C600E79A74 /* AsyncFromSyncIteratorPrototype.lut.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AsyncFromSyncIteratorPrototype.lut.h; sourceTree = "<group>"; }; 4951 4953 E38D060B1F8E814100649CF2 /* JSScriptFetchParameters.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = 
JSScriptFetchParameters.h; sourceTree = "<group>"; }; … … 6103 6105 C2B916C414DA040C00CBAC86 /* BlockDirectory.cpp */, 6104 6106 C2B916C114DA014E00CBAC86 /* BlockDirectory.h */, 6107 E38652E2237CA0C800E1D5EE /* BlockDirectoryBits.h */, 6105 6108 0F7DF1451E2BEF680095951B /* BlockDirectoryInlines.h */, 6106 6109 0F9630351D4192C3005609D9 /* CellAttributes.cpp */, … … 9009 9012 0F64B2721A784BAF006E4E66 /* BinarySwitch.h in Headers */, 9010 9013 C2B916C214DA014E00CBAC86 /* BlockDirectory.h in Headers */, 9014 E38652E3237CA0C900E1D5EE /* BlockDirectoryBits.h in Headers */, 9011 9015 0F7DF1461E2BEF6A0095951B /* BlockDirectoryInlines.h in Headers */, 9012 9016 BC18C3EC0E16F5CD00B34460 /* BooleanObject.h in Headers */, -
trunk/Source/JavaScriptCore/heap/AlignedMemoryAllocator.cpp
r245258 r252452 41 41 } 42 42 43 void AlignedMemoryAllocator::registerDirectory( BlockDirectory* directory)43 void AlignedMemoryAllocator::registerDirectory(Heap& heap, BlockDirectory* directory) 44 44 { 45 45 RELEASE_ASSERT(!directory->nextDirectoryInAlignedMemoryAllocator()); 46 46 47 47 if (m_directories.isEmpty()) { 48 ASSERT (!Thread::mayBeGCThread() || directory->heap()->worldIsStopped());48 ASSERT_UNUSED(heap, !Thread::mayBeGCThread() || heap.worldIsStopped()); 49 49 for (Subspace* subspace = m_subspaces.first(); subspace; subspace = subspace->nextSubspaceInAlignedMemoryAllocator()) 50 50 subspace->didCreateFirstDirectory(directory); -
trunk/Source/JavaScriptCore/heap/AlignedMemoryAllocator.h
r243688 r252452 32 32 33 33 class BlockDirectory; 34 class Heap; 34 35 class Subspace; 35 36 … … 46 47 virtual void dump(PrintStream&) const = 0; 47 48 48 void registerDirectory( BlockDirectory*);49 void registerDirectory(Heap&, BlockDirectory*); 49 50 BlockDirectory* firstDirectory() const { return m_directories.first(); } 50 51 -
trunk/Source/JavaScriptCore/heap/Allocator.h
r232074 r252452 32 32 33 33 class GCDeferralContext; 34 class Heap; 34 35 class LocalAllocator; 35 36 … … 46 47 } 47 48 48 void* allocate( GCDeferralContext*, AllocationFailureMode) const;49 void* allocate(Heap&, GCDeferralContext*, AllocationFailureMode) const; 49 50 50 51 unsigned cellSize() const; -
trunk/Source/JavaScriptCore/heap/AllocatorInlines.h
r232132 r252452 31 31 namespace JSC { 32 32 33 ALWAYS_INLINE void* Allocator::allocate( GCDeferralContext* context, AllocationFailureMode mode) const33 ALWAYS_INLINE void* Allocator::allocate(Heap& heap, GCDeferralContext* context, AllocationFailureMode mode) const 34 34 { 35 return m_localAllocator->allocate( context, mode);35 return m_localAllocator->allocate(heap, context, mode); 36 36 } 37 37 -
trunk/Source/JavaScriptCore/heap/BlockDirectory.cpp
r244506 r252452 39 39 namespace JSC { 40 40 41 BlockDirectory::BlockDirectory( Heap* heap,size_t cellSize)41 BlockDirectory::BlockDirectory(size_t cellSize) 42 42 : m_cellSize(static_cast<unsigned>(cellSize)) 43 , m_heap(heap)44 43 { 45 44 } … … 77 76 MarkedBlock::Handle* BlockDirectory::findEmptyBlockToSteal() 78 77 { 79 m_emptyCursor = m_ empty.findBit(m_emptyCursor, true);78 m_emptyCursor = m_bits.empty().findBit(m_emptyCursor, true); 80 79 if (m_emptyCursor >= m_blocks.size()) 81 80 return nullptr; … … 86 85 { 87 86 for (;;) { 88 allocator.m_allocationCursor = (m_ canAllocateButNotEmpty | m_empty).findBit(allocator.m_allocationCursor, true);87 allocator.m_allocationCursor = (m_bits.canAllocateButNotEmpty() | m_bits.empty()).findBit(allocator.m_allocationCursor, true); 89 88 if (allocator.m_allocationCursor >= m_blocks.size()) 90 89 return nullptr; … … 97 96 } 98 97 99 MarkedBlock::Handle* BlockDirectory::tryAllocateBlock( )98 MarkedBlock::Handle* BlockDirectory::tryAllocateBlock(Heap& heap) 100 99 { 101 100 SuperSamplerScope superSamplerScope(false); 102 101 103 MarkedBlock::Handle* handle = MarkedBlock::tryCreate( *m_heap, subspace()->alignedMemoryAllocator());102 MarkedBlock::Handle* handle = MarkedBlock::tryCreate(heap, subspace()->alignedMemoryAllocator()); 104 103 if (!handle) 105 104 return nullptr; … … 119 118 m_blocks.append(block); 120 119 if (m_blocks.capacity() != oldCapacity) { 121 forEachBitVector( 122 NoLockingNecessary, 123 [&] (FastBitVector& vector) { 124 ASSERT_UNUSED(vector, vector.numBits() == oldCapacity); 125 }); 126 120 ASSERT(m_bits.numBits() == oldCapacity); 127 121 ASSERT(m_blocks.capacity() > oldCapacity); 128 122 129 123 LockHolder locker(m_bitvectorLock); 130 124 subspace()->didResizeBits(m_blocks.capacity()); 131 forEachBitVector( 132 locker, 133 [&] (FastBitVector& vector) { 134 vector.resize(m_blocks.capacity()); 135 }); 125 m_bits.resize(m_blocks.capacity()); 136 126 } 137 127 } else { … … 143 133 forEachBitVector( 144 134 
NoLockingNecessary, 145 [&] (FastBitVector& vector) {146 ASSERT_UNUSED(vector , !vector[index]);135 [&](auto vectorRef) { 136 ASSERT_UNUSED(vectorRef, !vectorRef[index]); 147 137 }); 148 138 … … 166 156 forEachBitVector( 167 157 holdLock(m_bitvectorLock), 168 [&] (FastBitVector& vector) {169 vector [block->index()] = false;158 [&](auto vectorRef) { 159 vectorRef[block->index()] = false; 170 160 }); 171 161 … … 193 183 m_emptyCursor = 0; 194 184 195 m_ eden.clearAll();185 m_bits.eden().clearAll(); 196 186 197 187 if (UNLIKELY(Options::useImmortalObjects())) { … … 238 228 // collections, so if you survived the last collection you will survive the next one so long 239 229 // as the next one is eden. 240 m_ markingNotEmpty.clearAll();241 m_ markingRetired.clearAll();230 m_bits.markingNotEmpty().clearAll(); 231 m_bits.markingRetired().clearAll(); 242 232 } 243 233 244 234 void BlockDirectory::endMarking() 245 235 { 246 m_ allocated.clearAll();236 m_bits.allocated().clearAll(); 247 237 248 238 // It's surprising and frustrating to comprehend, but the end-of-marking flip does not need to … … 250 240 // vectors. 251 241 252 m_ empty = m_live & ~m_markingNotEmpty;253 m_ canAllocateButNotEmpty = m_live & m_markingNotEmpty & ~m_markingRetired;242 m_bits.empty() = m_bits.live() & ~m_bits.markingNotEmpty(); 243 m_bits.canAllocateButNotEmpty() = m_bits.live() & m_bits.markingNotEmpty() & ~m_bits.markingRetired(); 254 244 255 245 if (needsDestruction()) { … … 259 249 // we just allocate a block or when we move a block from one size class to another. That doesn't 260 250 // happen here. 
261 m_ destructible = m_live;251 m_bits.destructible() = m_bits.live(); 262 252 } 263 253 … … 270 260 void BlockDirectory::snapshotUnsweptForEdenCollection() 271 261 { 272 m_ unswept |= m_eden;262 m_bits.unswept() |= m_bits.eden(); 273 263 } 274 264 275 265 void BlockDirectory::snapshotUnsweptForFullCollection() 276 266 { 277 m_ unswept = m_live;267 m_bits.unswept() = m_bits.live(); 278 268 } 279 269 280 270 MarkedBlock::Handle* BlockDirectory::findBlockToSweep() 281 271 { 282 m_unsweptCursor = m_ unswept.findBit(m_unsweptCursor, true);272 m_unsweptCursor = m_bits.unswept().findBit(m_unsweptCursor, true); 283 273 if (m_unsweptCursor >= m_blocks.size()) 284 274 return nullptr; … … 288 278 void BlockDirectory::sweep() 289 279 { 290 m_ unswept.forEachSetBit(280 m_bits.unswept().forEachSetBit( 291 281 [&] (size_t index) { 292 282 MarkedBlock::Handle* block = m_blocks[index]; … … 297 287 void BlockDirectory::shrink() 298 288 { 299 (m_ empty & ~m_destructible).forEachSetBit(289 (m_bits.empty() & ~m_bits.destructible()).forEachSetBit( 300 290 [&] (size_t index) { 301 291 markedSpace().freeBlock(m_blocks[index]); … … 308 298 return; 309 299 310 if (m_ unswept.isEmpty())300 if (m_bits.unswept().isEmpty()) 311 301 return; 312 302 … … 330 320 return nullptr; 331 321 auto locker = holdLock(m_lock); 332 m_index = m_directory.m_ markingNotEmpty.findBit(m_index, true);322 m_index = m_directory.m_bits.markingNotEmpty().findBit(m_index, true); 333 323 if (m_index >= m_directory.m_blocks.size()) { 334 324 m_done = true; … … 358 348 forEachBitVectorWithName( 359 349 NoLockingNecessary, 360 [&] (FastBitVector&, const char* name) { 350 [&](auto vectorRef, const char* name) { 351 UNUSED_PARAM(vectorRef); 361 352 unsigned length = strlen(name); 362 353 maxNameLength = std::max(maxNameLength, length); … … 365 356 forEachBitVectorWithName( 366 357 NoLockingNecessary, 367 [&] (FastBitVector& vector, const char* name) {358 [&](auto vectorRef, const char* name) { 368 359 out.print(" ", name, 
": "); 369 360 for (unsigned i = maxNameLength - strlen(name); i--;) 370 361 out.print(" "); 371 out.print(vector , "\n");362 out.print(vectorRef, "\n"); 372 363 }); 373 364 } -
trunk/Source/JavaScriptCore/heap/BlockDirectory.h
r240216 r252452 27 27 28 28 #include "AllocationFailureMode.h" 29 #include "BlockDirectoryBits.h" 29 30 #include "CellAttributes.h" 30 31 #include "FreeList.h" … … 45 46 class LLIntOffsetsExtractor; 46 47 47 #define FOR_EACH_BLOCK_DIRECTORY_BIT(macro) \48 macro(live, Live) /* The set of block indices that have actual blocks. */\49 macro(empty, Empty) /* The set of all blocks that have no live objects. */ \50 macro(allocated, Allocated) /* The set of all blocks that are full of live objects. */\51 macro(canAllocateButNotEmpty, CanAllocateButNotEmpty) /* The set of all blocks are neither empty nor retired (i.e. are more than minMarkedBlockUtilization full). */ \52 macro(destructible, Destructible) /* The set of all blocks that may have destructors to run. */\53 macro(eden, Eden) /* The set of all blocks that have new objects since the last GC. */\54 macro(unswept, Unswept) /* The set of all blocks that could be swept by the incremental sweeper. */\55 \56 /* These are computed during marking. */\57 macro(markingNotEmpty, MarkingNotEmpty) /* The set of all blocks that are not empty. */ \58 macro(markingRetired, MarkingRetired) /* The set of all blocks that are retired. */59 60 // FIXME: We defined canAllocateButNotEmpty and empty to be exclusive:61 //62 // canAllocateButNotEmpty & empty == 063 //64 // Instead of calling it canAllocate and making it inclusive:65 //66 // canAllocate & empty == empty67 //68 // The latter is probably better. 
I'll leave it to a future bug to fix that, since breathing on69 // this code leads to regressions for days, and it's not clear that making this change would70 // improve perf since it would not change the collector's behavior, and either way the directory71 // has to look at both bitvectors.72 // https://bugs.webkit.org/show_bug.cgi?id=16212173 74 48 class BlockDirectory { 75 49 WTF_MAKE_NONCOPYABLE(BlockDirectory); … … 79 53 80 54 public: 81 BlockDirectory( Heap*,size_t cellSize);55 BlockDirectory(size_t cellSize); 82 56 ~BlockDirectory(); 83 57 void setSubspace(Subspace*); … … 99 73 DestructionMode destruction() const { return m_attributes.destruction; } 100 74 HeapCell::Kind cellKind() const { return m_attributes.cellKind; } 101 Heap* heap() { return m_heap; }102 75 103 76 bool isFreeListedCell(const void* target); … … 116 89 117 90 #define BLOCK_DIRECTORY_BIT_ACCESSORS(lowerBitName, capitalBitName) \ 118 bool is ## capitalBitName(const AbstractLocker&, size_t index) const { return m_ ## lowerBitName[index]; } \91 bool is ## capitalBitName(const AbstractLocker&, size_t index) const { return m_bits.is ## capitalBitName(index); } \ 119 92 bool is ## capitalBitName(const AbstractLocker& locker, MarkedBlock::Handle* block) const { return is ## capitalBitName(locker, block->index()); } \ 120 void setIs ## capitalBitName(const AbstractLocker&, size_t index, bool value) { m_ ## lowerBitName[index] = value; } \93 void setIs ## capitalBitName(const AbstractLocker&, size_t index, bool value) { m_bits.setIs ## capitalBitName(index, value); } \ 121 94 void setIs ## capitalBitName(const AbstractLocker& locker, MarkedBlock::Handle* block, bool value) { setIs ## capitalBitName(locker, block->index(), value); } 122 95 FOR_EACH_BLOCK_DIRECTORY_BIT(BLOCK_DIRECTORY_BIT_ACCESSORS) … … 127 100 { 128 101 #define BLOCK_DIRECTORY_BIT_CALLBACK(lowerBitName, capitalBitName) \ 129 func(m_ ## lowerBitName);102 func(m_bits.lowerBitName()); 130 103 
FOR_EACH_BLOCK_DIRECTORY_BIT(BLOCK_DIRECTORY_BIT_CALLBACK); 131 104 #undef BLOCK_DIRECTORY_BIT_CALLBACK … … 136 109 { 137 110 #define BLOCK_DIRECTORY_BIT_CALLBACK(lowerBitName, capitalBitName) \ 138 func(m_ ## lowerBitName, #capitalBitName);111 func(m_bits.lowerBitName(), #capitalBitName); 139 112 FOR_EACH_BLOCK_DIRECTORY_BIT(BLOCK_DIRECTORY_BIT_CALLBACK); 140 113 #undef BLOCK_DIRECTORY_BIT_CALLBACK … … 167 140 MarkedBlock::Handle* findBlockForAllocation(LocalAllocator&); 168 141 169 MarkedBlock::Handle* tryAllocateBlock( );142 MarkedBlock::Handle* tryAllocateBlock(Heap&); 170 143 171 144 Vector<MarkedBlock::Handle*> m_blocks; … … 174 147 // Mutator uses this to guard resizing the bitvectors. Those things in the GC that may run 175 148 // concurrently to the mutator must lock this when accessing the bitvectors. 176 #define BLOCK_DIRECTORY_BIT_DECLARATION(lowerBitName, capitalBitName) \ 177 FastBitVector m_ ## lowerBitName; 178 FOR_EACH_BLOCK_DIRECTORY_BIT(BLOCK_DIRECTORY_BIT_DECLARATION) 179 #undef BLOCK_DIRECTORY_BIT_DECLARATION 149 BlockDirectoryBits m_bits; 180 150 Lock m_bitvectorLock; 181 151 Lock m_localAllocatorsLock; … … 191 161 // FIXME: All of these should probably be references. 192 162 // https://bugs.webkit.org/show_bug.cgi?id=166988 193 Heap* m_heap { nullptr };194 163 Subspace* m_subspace { nullptr }; 195 164 BlockDirectory* m_nextDirectory { nullptr }; -
trunk/Source/JavaScriptCore/heap/BlockDirectoryInlines.h
r227617 r252452 34 34 template <typename Functor> inline void BlockDirectory::forEachBlock(const Functor& functor) 35 35 { 36 m_ live.forEachSetBit(36 m_bits.live().forEachSetBit( 37 37 [&] (size_t index) { 38 38 functor(m_blocks[index]); … … 42 42 template <typename Functor> inline void BlockDirectory::forEachNotEmptyBlock(const Functor& functor) 43 43 { 44 m_ markingNotEmpty.forEachSetBit(44 m_bits.markingNotEmpty().forEachSetBit( 45 45 [&] (size_t index) { 46 46 functor(m_blocks[index]); -
trunk/Source/JavaScriptCore/heap/CompleteSubspace.cpp
r252302 r252452 33 33 #include "LocalAllocatorInlines.h" 34 34 #include "MarkedBlockInlines.h" 35 #include "MarkedSpaceInlines.h" 35 36 #include "PreventCollectionScope.h" 36 37 #include "SubspaceInlines.h" … … 80 81 dataLog("Creating BlockDirectory/LocalAllocator for ", m_name, ", ", attributes(), ", ", sizeClass, ".\n"); 81 82 82 std::unique_ptr<BlockDirectory> uniqueDirectory = makeUnique<BlockDirectory>( m_space.heap(),sizeClass);83 std::unique_ptr<BlockDirectory> uniqueDirectory = makeUnique<BlockDirectory>(sizeClass); 83 84 BlockDirectory* directory = uniqueDirectory.get(); 84 85 m_directories.append(WTFMove(uniqueDirectory)); … … 106 107 107 108 directory->setNextDirectoryInSubspace(m_firstDirectory); 108 m_alignedMemoryAllocator->registerDirectory( directory);109 m_alignedMemoryAllocator->registerDirectory(m_space.heap(), directory); 109 110 WTF::storeStoreFence(); 110 111 m_firstDirectory = directory; … … 128 129 129 130 if (Allocator allocator = allocatorFor(size, AllocatorForMode::EnsureAllocator)) 130 return allocator.allocate( deferralContext, AllocationFailureMode::ReturnNull);131 return allocator.allocate(vm.heap, deferralContext, AllocationFailureMode::ReturnNull); 131 132 132 133 if (size <= Options::preciseAllocationCutoff() -
trunk/Source/JavaScriptCore/heap/CompleteSubspaceInlines.h
r251690 r252452 37 37 38 38 if (Allocator allocator = allocatorForNonVirtual(size, AllocatorForMode::AllocatorIfExists)) 39 return allocator.allocate( deferralContext, failureMode);39 return allocator.allocate(vm.heap, deferralContext, failureMode); 40 40 return allocateSlow(vm, size, deferralContext, failureMode); 41 41 } -
trunk/Source/JavaScriptCore/heap/IsoCellSet.cpp
r252298 r252452 62 62 return nullptr; 63 63 auto locker = holdLock(m_lock); 64 auto bits = m_directory.m_ markingNotEmpty& m_set.m_blocksWithBits;64 auto bits = m_directory.m_bits.markingNotEmpty() & m_set.m_blocksWithBits; 65 65 m_index = bits.findBit(m_index, true); 66 66 if (m_index >= m_directory.m_blocks.size()) { -
trunk/Source/JavaScriptCore/heap/IsoCellSetInlines.h
r252385 r252452 71 71 { 72 72 BlockDirectory& directory = m_subspace.m_directory; 73 (directory.m_ markingNotEmpty& m_blocksWithBits).forEachSetBit(73 (directory.m_bits.markingNotEmpty() & m_blocksWithBits).forEachSetBit( 74 74 [&] (size_t blockIndex) { 75 75 MarkedBlock::Handle* block = directory.m_blocks[blockIndex]; -
trunk/Source/JavaScriptCore/heap/IsoSubspace.cpp
r252385 r252452 33 33 #include "IsoSubspaceInlines.h" 34 34 #include "LocalAllocatorInlines.h" 35 #include "MarkedSpaceInlines.h" 35 36 36 37 namespace JSC { … … 38 39 IsoSubspace::IsoSubspace(CString name, Heap& heap, HeapCellType* heapCellType, size_t size, uint8_t numberOfLowerTierCells) 39 40 : Subspace(name, heap) 40 , m_size(size) 41 , m_directory(&heap, WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(size)) 41 , m_directory(WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(size)) 42 42 , m_localAllocator(&m_directory) 43 43 , m_isoAlignedMemoryAllocator(makeUnique<IsoAlignedMemoryAllocator>()) 44 , m_remainingLowerTierCellCount(numberOfLowerTierCells)45 44 { 45 m_remainingLowerTierCellCount = numberOfLowerTierCells; 46 ASSERT(WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(size) == cellSize()); 46 47 ASSERT(numberOfLowerTierCells <= MarkedBlock::maxNumberOfLowerTierCells); 47 48 m_isIsoSubspace = true; … … 51 52 m_directory.setSubspace(this); 52 53 m_space.addBlockDirectory(locker, &m_directory); 53 m_alignedMemoryAllocator->registerDirectory( &m_directory);54 m_alignedMemoryAllocator->registerDirectory(heap, &m_directory); 54 55 m_firstDirectory = &m_directory; 55 56 } … … 112 113 } 113 114 if (m_remainingLowerTierCellCount) { 114 size_t size = WTF::roundUpToMultipleOf<MarkedSpace::sizeStep>(m_size); 115 PreciseAllocation* allocation = PreciseAllocation::createForLowerTier(*m_space.heap(), size, this, --m_remainingLowerTierCellCount); 115 PreciseAllocation* allocation = PreciseAllocation::createForLowerTier(m_space.heap(), cellSize(), this, --m_remainingLowerTierCellCount); 116 116 return revive(allocation); 117 117 } -
trunk/Source/JavaScriptCore/heap/IsoSubspace.h
r252385 r252452 41 41 JS_EXPORT_PRIVATE ~IsoSubspace(); 42 42 43 size_t size() const { return m_size; }43 size_t cellSize() { return m_directory.cellSize(); } 44 44 45 45 Allocator allocatorFor(size_t, AllocatorForMode) override; … … 64 64 void didBeginSweepingToFreeList(MarkedBlock::Handle*) override; 65 65 66 size_t m_size;67 66 BlockDirectory m_directory; 68 67 LocalAllocator m_localAllocator; … … 70 69 SentinelLinkedList<PreciseAllocation, PackedRawSentinelNode<PreciseAllocation>> m_lowerTierFreeList; 71 70 SentinelLinkedList<IsoCellSet, PackedRawSentinelNode<IsoCellSet>> m_cellSets; 72 uint8_t m_remainingLowerTierCellCount { 0 };73 71 }; 74 72 75 73 ALWAYS_INLINE Allocator IsoSubspace::allocatorForNonVirtual(size_t size, AllocatorForMode) 76 74 { 77 RELEASE_ASSERT( size == this->size());75 RELEASE_ASSERT(WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(size) == cellSize()); 78 76 return Allocator(&m_localAllocator); 79 77 } -
trunk/Source/JavaScriptCore/heap/IsoSubspaceInlines.h
r252385 r252452 28 28 namespace JSC { 29 29 30 ALWAYS_INLINE void* IsoSubspace::allocateNonVirtual(VM& , size_t size, GCDeferralContext* deferralContext, AllocationFailureMode failureMode)30 ALWAYS_INLINE void* IsoSubspace::allocateNonVirtual(VM& vm, size_t size, GCDeferralContext* deferralContext, AllocationFailureMode failureMode) 31 31 { 32 RELEASE_ASSERT( size == this->size());32 RELEASE_ASSERT(WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(size) == cellSize()); 33 33 Allocator allocator = allocatorForNonVirtual(size, AllocatorForMode::MustAlreadyHaveAllocator); 34 void* result = allocator.allocate( deferralContext, failureMode);34 void* result = allocator.allocate(vm.heap, deferralContext, failureMode); 35 35 return result; 36 36 } -
trunk/Source/JavaScriptCore/heap/IsoSubspacePerVM.cpp
r252385 r252452 42 42 { 43 43 auto locker = holdLock(m_perVM.m_lock); 44 m_perVM.m_subspacePerVM.remove(&space().heap() ->vm());44 m_perVM.m_subspacePerVM.remove(&space().heap().vm()); 45 45 } 46 46 -
trunk/Source/JavaScriptCore/heap/LocalAllocator.cpp
r252298 r252452 111 111 } 112 112 113 void* LocalAllocator::allocateSlowCase( GCDeferralContext* deferralContext, AllocationFailureMode failureMode)113 void* LocalAllocator::allocateSlowCase(Heap& heap, GCDeferralContext* deferralContext, AllocationFailureMode failureMode) 114 114 { 115 115 SuperSamplerScope superSamplerScope(false); 116 Heap& heap = *m_directory->m_heap;117 116 ASSERT(heap.vm().currentThreadIsHoldingAPILock()); 118 doTestCollectionsIfNeeded( deferralContext);117 doTestCollectionsIfNeeded(heap, deferralContext); 119 118 120 119 ASSERT(!m_directory->markedSpace().isIterating()); … … 130 129 // happens when running WebKit tests, which inject a callback into the GC's finalization. 131 130 if (UNLIKELY(m_currentBlock)) 132 return allocate( deferralContext, failureMode);131 return allocate(heap, deferralContext, failureMode); 133 132 134 133 void* result = tryAllocateWithoutCollecting(); … … 143 142 } 144 143 145 MarkedBlock::Handle* block = m_directory->tryAllocateBlock( );144 MarkedBlock::Handle* block = m_directory->tryAllocateBlock(heap); 146 145 if (!block) { 147 146 if (failureMode == AllocationFailureMode::Assert) … … 250 249 } 251 250 252 void LocalAllocator::doTestCollectionsIfNeeded( GCDeferralContext* deferralContext)251 void LocalAllocator::doTestCollectionsIfNeeded(Heap& heap, GCDeferralContext* deferralContext) 253 252 { 254 253 if (!Options::slowPathAllocsBetweenGCs()) … … 257 256 static unsigned allocationCount = 0; 258 257 if (!allocationCount) { 259 if (! m_directory->m_heap->isDeferred()) {258 if (!heap.isDeferred()) { 260 259 if (deferralContext) 261 260 deferralContext->m_shouldGC = true; 262 261 else 263 m_directory->m_heap->collectNow(Sync, CollectionScope::Full);262 heap.collectNow(Sync, CollectionScope::Full); 264 263 } 265 264 } -
trunk/Source/JavaScriptCore/heap/LocalAllocator.h
r244088 r252452 35 35 class BlockDirectory; 36 36 class GCDeferralContext; 37 class Heap; 37 38 38 39 class LocalAllocator : public BasicRawSentinelNode<LocalAllocator> { … … 43 44 ~LocalAllocator(); 44 45 45 void* allocate( GCDeferralContext*, AllocationFailureMode);46 void* allocate(Heap&, GCDeferralContext*, AllocationFailureMode); 46 47 47 48 unsigned cellSize() const { return m_freeList.cellSize(); } … … 61 62 62 63 void reset(); 63 JS_EXPORT_PRIVATE void* allocateSlowCase( GCDeferralContext*, AllocationFailureMode failureMode);64 JS_EXPORT_PRIVATE void* allocateSlowCase(Heap&, GCDeferralContext*, AllocationFailureMode); 64 65 void didConsumeFreeList(); 65 66 void* tryAllocateWithoutCollecting(); 66 67 void* tryAllocateIn(MarkedBlock::Handle*); 67 68 void* allocateIn(MarkedBlock::Handle*); 68 ALWAYS_INLINE void doTestCollectionsIfNeeded( GCDeferralContext*);69 ALWAYS_INLINE void doTestCollectionsIfNeeded(Heap&, GCDeferralContext*); 69 70 70 71 BlockDirectory* m_directory; -
trunk/Source/JavaScriptCore/heap/LocalAllocatorInlines.h
r244088 r252452 31 31 namespace JSC { 32 32 33 ALWAYS_INLINE void* LocalAllocator::allocate( GCDeferralContext* deferralContext, AllocationFailureMode failureMode)33 ALWAYS_INLINE void* LocalAllocator::allocate(Heap& heap, GCDeferralContext* deferralContext, AllocationFailureMode failureMode) 34 34 { 35 35 if (validateDFGDoesGC) 36 RELEASE_ASSERT( m_directory->heap()->expectDoesGC());36 RELEASE_ASSERT(heap.expectDoesGC()); 37 37 return m_freeList.allocate( 38 38 [&] () -> HeapCell* { 39 sanitizeStackForVM( m_directory->heap()->vm());40 return static_cast<HeapCell*>(allocateSlowCase( deferralContext, failureMode));39 sanitizeStackForVM(heap.vm()); 40 return static_cast<HeapCell*>(allocateSlowCase(heap, deferralContext, failureMode)); 41 41 }); 42 42 } -
trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp
r252298 r252452 378 378 directory()->forEachBitVectorWithName( 379 379 holdLock(directory()->bitvectorLock()), 380 [&] (FastBitVector& bitvector, const char* name) {381 out.print(comma, name, ":", bitvector[index()] ? "YES" : "no");380 [&](auto vectorRef, const char* name) { 381 out.print(comma, name, ":", vectorRef[index()] ? "YES" : "no"); 382 382 }); 383 383 } -
trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp
r252385 r252452 28 28 #include "JSCInlines.h" 29 29 #include "MarkedBlockInlines.h" 30 #include "MarkedSpaceInlines.h" 30 31 #include <wtf/ListDump.h> 31 32 … … 196 197 197 198 MarkedSpace::MarkedSpace(Heap* heap) 198 : m_heap(heap) 199 { 199 { 200 ASSERT_UNUSED(heap, heap == &this->heap()); 200 201 initializeSizeClassForStepSize(); 201 202 } … … 235 236 void MarkedSpace::sweepBlocks() 236 237 { 237 m_heap->sweeper().stopSweeping();238 heap().sweeper().stopSweeping(); 238 239 forEachDirectory( 239 240 [&] (BlockDirectory& directory) -> IterationStatus { … … 271 272 void MarkedSpace::prepareForAllocation() 272 273 { 273 ASSERT(!Thread::mayBeGCThread() || m_heap->worldIsStopped());274 ASSERT(!Thread::mayBeGCThread() || heap().worldIsStopped()); 274 275 for (Subspace* subspace : m_subspaces) 275 276 subspace->prepareForAllocation(); … … 277 278 m_activeWeakSets.takeFrom(m_newActiveWeakSets); 278 279 279 if ( m_heap->collectionScope() == CollectionScope::Eden)280 if (heap().collectionScope() == CollectionScope::Eden) 280 281 m_preciseAllocationsNurseryOffsetForSweep = m_preciseAllocationsNurseryOffset; 281 282 else … … 299 300 m_newActiveWeakSets.forEach(visit); 300 301 301 if ( m_heap->collectionScope() == CollectionScope::Full)302 if (heap().collectionScope() == CollectionScope::Full) 302 303 m_activeWeakSets.forEach(visit); 303 304 } … … 311 312 m_newActiveWeakSets.forEach(visit); 312 313 313 if ( m_heap->collectionScope() == CollectionScope::Full)314 if (heap().collectionScope() == CollectionScope::Full) 314 315 m_activeWeakSets.forEach(visit); 315 316 } … … 357 358 void MarkedSpace::prepareForMarking() 358 359 { 359 if ( m_heap->collectionScope() == CollectionScope::Eden)360 if (heap().collectionScope() == CollectionScope::Eden) 360 361 m_preciseAllocationsOffsetForThisCollection = m_preciseAllocationsNurseryOffset; 361 362 else … … 417 418 void MarkedSpace::beginMarking() 418 419 { 419 if ( m_heap->collectionScope() == CollectionScope::Full) {420 if 
(heap().collectionScope() == CollectionScope::Full) { 420 421 forEachDirectory( 421 422 [&] (BlockDirectory& directory) -> IterationStatus { … … 552 553 void MarkedSpace::snapshotUnswept() 553 554 { 554 if ( m_heap->collectionScope() == CollectionScope::Eden) {555 if (heap().collectionScope() == CollectionScope::Eden) { 555 556 forEachDirectory( 556 557 [&] (BlockDirectory& directory) -> IterationStatus { -
trunk/Source/JavaScriptCore/heap/MarkedSpace.h
r252385 r252452 96 96 ~MarkedSpace(); 97 97 98 Heap * heap() const { return m_heap; }98 Heap& heap() const; 99 99 100 100 void lastChanceToFinalize(); // Must call stopAllocatingForGood first. … … 214 214 PreciseAllocation** m_preciseAllocationsForThisCollectionEnd { nullptr }; 215 215 216 Heap* m_heap;217 216 size_t m_capacity { 0 }; 218 217 HeapVersion m_markingVersion { initialVersion }; -
trunk/Source/JavaScriptCore/heap/MarkedSpaceInlines.h
r252302 r252452 30 30 31 31 namespace JSC { 32 33 ALWAYS_INLINE Heap& MarkedSpace::heap() const 34 { 35 return *bitwise_cast<Heap*>(bitwise_cast<uintptr_t>(this) - OBJECT_OFFSETOF(Heap, m_objectSpace)); 36 } 32 37 33 38 template<typename Functor> inline void MarkedSpace::forEachLiveCell(HeapIterationScope&, const Functor& functor) -
trunk/Source/JavaScriptCore/heap/Subspace.cpp
r252385 r252452 32 32 #include "JSCInlines.h" 33 33 #include "MarkedBlockInlines.h" 34 #include "MarkedSpaceInlines.h" 34 35 #include "ParallelSourceAdapter.h" 35 36 #include "PreventCollectionScope.h" … … 50 51 m_directoryForEmptyAllocation = m_alignedMemoryAllocator->firstDirectory(); 51 52 52 Heap& heap = *m_space.heap();53 Heap& heap = m_space.heap(); 53 54 heap.objectSpace().m_subspaces.append(this); 54 55 m_alignedMemoryAllocator->registerSubspace(this); -
trunk/Source/JavaScriptCore/heap/Subspace.h
r252385 r252452 120 120 121 121 bool m_isIsoSubspace { false }; 122 protected: 123 uint8_t m_remainingLowerTierCellCount { 0 }; 122 124 }; 123 125 -
trunk/Source/WTF/ChangeLog
r252345 r252452 1 2019-11-14 Yusuke Suzuki <ysuzuki@apple.com> 2 3 [JSC] BlockDirectory's bits should be compact 4 https://bugs.webkit.org/show_bug.cgi?id=204149 5 6 Reviewed by Robin Morisset. 7 8 * wtf/FastBitVector.h: 9 (WTF::fastBitVectorArrayLength): 10 (WTF::FastBitVectorImpl::unsafeWords): 11 (WTF::FastBitVectorImpl::unsafeWords const): 12 (WTF::FastBitReference::FastBitReference): 13 (WTF::FastBitReference::operator bool const): 14 (WTF::FastBitReference::operator=): 15 (WTF::FastBitVector::at): 16 (WTF::FastBitVector::operator[]): 17 (WTF::FastBitVector::BitReference::BitReference): Deleted. 18 (WTF::FastBitVector::BitReference::operator bool const): Deleted. 19 (WTF::FastBitVector::BitReference::operator=): Deleted. 20 1 21 2019-11-11 Ross Kirsling <ross.kirsling@sony.com> 2 22 -
trunk/Source/WTF/wtf/FastBitVector.h
r248546 r252452 36 36 class PrintStream; 37 37 38 inline size_t fastBitVectorArrayLength(size_t numBits) { return (numBits + 31) / 32; }38 inline constexpr size_t fastBitVectorArrayLength(size_t numBits) { return (numBits + 31) / 32; } 39 39 40 40 class FastBitVectorWordView { … … 422 422 423 423 typename Words::ViewType wordView() const { return m_words.view(); } 424 425 Words& unsafeWords() { return m_words; } 426 const Words& unsafeWords() const { return m_words; } 424 427 425 428 private: … … 436 439 Words m_words; 437 440 }; 441 442 class FastBitReference { 443 WTF_MAKE_FAST_ALLOCATED; 444 public: 445 FastBitReference() = default; 446 447 FastBitReference(uint32_t* word, uint32_t mask) 448 : m_word(word) 449 , m_mask(mask) 450 { 451 } 452 453 explicit operator bool() const 454 { 455 return !!(*m_word & m_mask); 456 } 457 458 FastBitReference& operator=(bool value) 459 { 460 if (value) 461 *m_word |= m_mask; 462 else 463 *m_word &= ~m_mask; 464 return *this; 465 } 466 467 private: 468 uint32_t* m_word { nullptr }; 469 uint32_t m_mask { 0 }; 470 }; 471 472 438 473 439 474 class FastBitVector : public FastBitVectorImpl<FastBitVectorWordOwner> { … … 519 554 } 520 555 521 class BitReference { 522 public: 523 BitReference() { } 524 525 BitReference(uint32_t* word, uint32_t mask) 526 : m_word(word) 527 , m_mask(mask) 528 { 529 } 530 531 explicit operator bool() const 532 { 533 return !!(*m_word & m_mask); 534 } 535 536 BitReference& operator=(bool value) 537 { 538 if (value) 539 *m_word |= m_mask; 540 else 541 *m_word &= ~m_mask; 542 return *this; 543 } 544 545 private: 546 uint32_t* m_word { nullptr }; 547 uint32_t m_mask { 0 }; 548 }; 549 550 BitReference at(size_t index) 556 FastBitReference at(size_t index) 551 557 { 552 558 ASSERT_WITH_SECURITY_IMPLICATION(index < numBits()); 553 return BitReference(&m_words.word(index >> 5), 1 << (index & 31));554 } 555 556 BitReference operator[](size_t index)559 return FastBitReference(&m_words.word(index >> 5), 1 << (index & 
31)); 560 } 561 562 FastBitReference operator[](size_t index) 557 563 { 558 564 return at(index);
Note: See TracChangeset
for help on using the changeset viewer.