Changeset 128141 in WebKit
- Timestamp:
- Sep 10, 2012 8:02:46 PM (12 years ago)
- Location:
- trunk/Source/JavaScriptCore
- Files:
- 8 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/ChangeLog
r128122 r128141 1 2012-09-10 Geoffrey Garen <ggaren@apple.com> 2 3 Added large allocation support to MarkedSpace 4 https://bugs.webkit.org/show_bug.cgi?id=96214 5 6 Originally reviewed by Oliver Hunt, then I added a design revision by 7 suggested by Phil Pizlo. 8 9 I expanded the imprecise size classes to cover up to 32KB, then added 10 an mmap-based allocator for everything bigger. There's a lot of tuning 11 we could do in these size classes, but currently they're almost 12 completely unused, so I haven't done any tuning. 13 14 Subtle point: the large allocator is a degenerate case of our free list 15 logic. Its list only ever contains zero or one items. 16 17 * heap/Heap.h: 18 (JSC::Heap::allocateStructure): Pipe in size information. 19 20 * heap/MarkedAllocator.cpp: 21 (JSC::MarkedAllocator::tryAllocateHelper): Handle the case where we 22 find a free item in the sweep list but the item isn't big enough. This 23 can happen in the large allocator because it mixes sizes. 24 25 (JSC::MarkedAllocator::tryAllocate): 26 (JSC::MarkedAllocator::allocateSlowCase): More piping. 27 28 (JSC::MarkedAllocator::allocateBlock): Handle the oversize case. 29 30 (JSC::MarkedAllocator::addBlock): I moved the call to didAddBlock here 31 because it made more sense. 32 33 * heap/MarkedAllocator.h: 34 (MarkedAllocator): 35 (JSC::MarkedAllocator::allocate): 36 * heap/MarkedSpace.cpp: 37 (JSC::MarkedSpace::MarkedSpace): 38 (JSC::MarkedSpace::resetAllocators): 39 (JSC::MarkedSpace::canonicalizeCellLivenessData): 40 (JSC::MarkedSpace::isPagedOut): 41 (JSC::MarkedSpace::freeBlock): 42 * heap/MarkedSpace.h: 43 (MarkedSpace): 44 (JSC::MarkedSpace::allocatorFor): 45 (JSC::MarkedSpace::destructorAllocatorFor): 46 (JSC::MarkedSpace::allocateWithoutDestructor): 47 (JSC::MarkedSpace::allocateWithDestructor): 48 (JSC::MarkedSpace::allocateStructure): 49 (JSC::MarkedSpace::forEachBlock): 50 * runtime/Structure.h: 51 (JSC::Structure): More piping. 
52 1 53 2012-09-10 Geoffrey Garen <ggaren@apple.com> 2 54 -
trunk/Source/JavaScriptCore/JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.def
r128084 r128141 64 64 ?addSlowCase@Identifier@JSC@@CA?AV?$PassRefPtr@VStringImpl@WTF@@@WTF@@PAVJSGlobalData@2@PAVStringImpl@4@@Z 65 65 ?addStaticGlobals@JSGlobalObject@JSC@@IAEXPAUGlobalPropertyInfo@12@H@Z 66 ?allocateSlowCase@MarkedAllocator@JSC@@AAEPAX XZ66 ?allocateSlowCase@MarkedAllocator@JSC@@AAEPAXI@Z 67 67 ?append@StringBuilder@WTF@@QAEXPBEI@Z 68 68 ?append@StringBuilder@WTF@@QAEXPB_WI@Z -
trunk/Source/JavaScriptCore/heap/Heap.h
r127829 r128141 186 186 void* allocateWithDestructor(size_t); 187 187 void* allocateWithoutDestructor(size_t); 188 void* allocateStructure( );188 void* allocateStructure(size_t); 189 189 190 190 static const size_t minExtraCost = 256; … … 373 373 } 374 374 375 inline void* Heap::allocateStructure( )376 { 377 return m_objectSpace.allocateStructure( );375 inline void* Heap::allocateStructure(size_t bytes) 376 { 377 return m_objectSpace.allocateStructure(bytes); 378 378 } 379 379 -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.cpp
r124352 r128141 28 28 } 29 29 30 inline void* MarkedAllocator::tryAllocateHelper( )30 inline void* MarkedAllocator::tryAllocateHelper(size_t bytes) 31 31 { 32 32 if (!m_freeList.head) { … … 43 43 44 44 for (MarkedBlock*& block = m_blocksToSweep; block; block = block->next()) { 45 m_freeList = block->sweep(MarkedBlock::SweepToFreeList);46 if ( m_freeList.head) {47 m_currentBlock = block;48 break;45 MarkedBlock::FreeList freeList = block->sweep(MarkedBlock::SweepToFreeList); 46 if (!freeList.head) { 47 block->didConsumeFreeList(); 48 continue; 49 49 } 50 block->didConsumeFreeList(); 50 51 if (bytes > block->cellSize()) { 52 block->zapFreeList(freeList); 53 continue; 54 } 55 56 m_currentBlock = block; 57 m_freeList = freeList; 58 break; 51 59 } 52 60 … … 63 71 } 64 72 65 inline void* MarkedAllocator::tryAllocate( )73 inline void* MarkedAllocator::tryAllocate(size_t bytes) 66 74 { 67 75 ASSERT(!m_heap->isBusy()); 68 76 m_heap->m_operationInProgress = Allocation; 69 void* result = tryAllocateHelper( );77 void* result = tryAllocateHelper(bytes); 70 78 m_heap->m_operationInProgress = NoOperation; 71 79 return result; 72 80 } 73 81 74 void* MarkedAllocator::allocateSlowCase( )82 void* MarkedAllocator::allocateSlowCase(size_t bytes) 75 83 { 76 84 ASSERT(m_heap->globalData()->apiLock().currentThreadIsHoldingLock()); … … 83 91 m_heap->didAllocate(m_freeList.bytes); 84 92 85 void* result = tryAllocate( );93 void* result = tryAllocate(bytes); 86 94 87 95 if (LIKELY(result != 0)) … … 91 99 m_heap->collect(Heap::DoNotSweep); 92 100 93 result = tryAllocate( );101 result = tryAllocate(bytes); 94 102 if (result) 95 103 return result; … … 98 106 ASSERT(!m_heap->shouldCollect()); 99 107 100 MarkedBlock* block = allocateBlock( );108 MarkedBlock* block = allocateBlock(bytes); 101 109 ASSERT(block); 102 110 addBlock(block); 103 111 104 result = tryAllocate( );112 result = tryAllocate(bytes); 105 113 ASSERT(result); 106 114 return result; 107 115 } 108 116 109 MarkedBlock* 
MarkedAllocator::allocateBlock( )117 MarkedBlock* MarkedAllocator::allocateBlock(size_t bytes) 110 118 { 111 MarkedBlock* block = MarkedBlock::create(m_heap->blockAllocator().allocate(), m_heap, m_cellSize, m_cellsNeedDestruction, m_onlyContainsStructures); 112 m_markedSpace->didAddBlock(block); 113 return block; 119 size_t minBlockSize = MarkedBlock::blockSize; 120 size_t minAllocationSize = WTF::roundUpToMultipleOf(WTF::pageSize(), sizeof(MarkedBlock) + bytes); 121 size_t blockSize = std::max(minBlockSize, minAllocationSize); 122 123 size_t cellSize = m_cellSize ? m_cellSize : WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(bytes); 124 125 if (blockSize == MarkedBlock::blockSize) { 126 PageAllocationAligned allocation = m_heap->blockAllocator().allocate(); 127 return MarkedBlock::create(allocation, m_heap, cellSize, m_cellsNeedDestruction, m_onlyContainsStructures); 128 } 129 130 PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, MarkedBlock::blockSize, OSAllocator::JSGCHeapPages); 131 if (!static_cast<bool>(allocation)) 132 CRASH(); 133 return MarkedBlock::create(allocation, m_heap, cellSize, m_cellsNeedDestruction, m_onlyContainsStructures); 114 134 } 115 135 … … 122 142 m_blocksToSweep = m_currentBlock = block; 123 143 m_freeList = block->sweep(MarkedBlock::SweepToFreeList); 144 m_markedSpace->didAddBlock(block); 124 145 } 125 146 -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.h
r124265 r128141 26 26 bool cellsNeedDestruction() { return m_cellsNeedDestruction; } 27 27 bool onlyContainsStructures() { return m_onlyContainsStructures; } 28 void* allocate( );28 void* allocate(size_t); 29 29 Heap* heap() { return m_heap; } 30 30 … … 40 40 friend class LLIntOffsetsExtractor; 41 41 42 JS_EXPORT_PRIVATE void* allocateSlowCase( );43 void* tryAllocate( );44 void* tryAllocateHelper( );45 MarkedBlock* allocateBlock( );42 JS_EXPORT_PRIVATE void* allocateSlowCase(size_t); 43 void* tryAllocate(size_t); 44 void* tryAllocateHelper(size_t); 45 MarkedBlock* allocateBlock(size_t); 46 46 47 47 MarkedBlock::FreeList m_freeList; … … 76 76 } 77 77 78 inline void* MarkedAllocator::allocate( )78 inline void* MarkedAllocator::allocate(size_t bytes) 79 79 { 80 80 MarkedBlock::FreeCell* head = m_freeList.head; 81 // This is a light-weight fast path to cover the most common case.82 81 if (UNLIKELY(!head)) 83 return allocateSlowCase( );82 return allocateSlowCase(bytes); 84 83 85 84 m_freeList.head = head->next; -
trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp
r127202 r128141 91 91 } 92 92 93 m_largeAllocator.init(heap, this, 0, true, false); 93 94 m_structureAllocator.init(heap, this, WTF::roundUpToMultipleOf(32, sizeof(Structure)), true, true); 94 95 } … … 128 129 } 129 130 131 m_largeAllocator.reset(); 130 132 m_structureAllocator.reset(); 131 133 } … … 154 156 } 155 157 158 m_largeAllocator.zapFreeList(); 156 159 m_structureAllocator.zapFreeList(); 157 160 } … … 169 172 } 170 173 174 if (m_largeAllocator.isPagedOut(deadline)) 175 return true; 176 171 177 if (m_structureAllocator.isPagedOut(deadline)) 172 178 return true; … … 179 185 allocatorFor(block).removeBlock(block); 180 186 m_blocks.remove(block); 181 m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block)); 187 if (block->capacity() == MarkedBlock::blockSize) { 188 m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block)); 189 return; 190 } 191 192 MarkedBlock::destroy(block).deallocate(); 182 193 } 183 194 -
trunk/Source/JavaScriptCore/heap/MarkedSpace.h
r124265 r128141 81 81 void* allocateWithDestructor(size_t); 82 82 void* allocateWithoutDestructor(size_t); 83 void* allocateStructure( );83 void* allocateStructure(size_t); 84 84 85 85 void resetAllocators(); … … 116 116 private: 117 117 friend class LLIntOffsetsExtractor; 118 119 // [ 32... 256]118 119 // [ 32... 512 ] 120 120 static const size_t preciseStep = MarkedBlock::atomSize; 121 static const size_t preciseCutoff = 256;121 static const size_t preciseCutoff = 512; 122 122 static const size_t preciseCount = preciseCutoff / preciseStep; 123 123 124 // [ 512... 2048]125 static const size_t impreciseStep = preciseCutoff;126 static const size_t impreciseCutoff = maxCellSize;124 // [ 1024... blockSize ] 125 static const size_t impreciseStep = 2 * preciseCutoff; 126 static const size_t impreciseCutoff = MarkedBlock::blockSize / 2; 127 127 static const size_t impreciseCount = impreciseCutoff / impreciseStep; 128 128 … … 134 134 Subspace m_destructorSpace; 135 135 Subspace m_normalSpace; 136 MarkedAllocator m_largeAllocator; 136 137 MarkedAllocator m_structureAllocator; 137 138 … … 163 164 inline MarkedAllocator& MarkedSpace::allocatorFor(size_t bytes) 164 165 { 165 ASSERT(bytes && bytes <= maxCellSize);166 ASSERT(bytes); 166 167 if (bytes <= preciseCutoff) 167 168 return m_normalSpace.preciseAllocators[(bytes - 1) / preciseStep]; 168 return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 169 if (bytes <= impreciseCutoff) 170 return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 171 return m_largeAllocator; 169 172 } 170 173 … … 182 185 inline MarkedAllocator& MarkedSpace::destructorAllocatorFor(size_t bytes) 183 186 { 184 ASSERT(bytes && bytes <= maxCellSize);187 ASSERT(bytes); 185 188 if (bytes <= preciseCutoff) 186 189 return m_destructorSpace.preciseAllocators[(bytes - 1) / preciseStep]; 187 return m_destructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 190 if (bytes <= impreciseCutoff) 191 return 
m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 192 return m_largeAllocator; 188 193 } 189 194 190 195 inline void* MarkedSpace::allocateWithoutDestructor(size_t bytes) 191 196 { 192 return allocatorFor(bytes).allocate( );197 return allocatorFor(bytes).allocate(bytes); 193 198 } 194 199 195 200 inline void* MarkedSpace::allocateWithDestructor(size_t bytes) 196 201 { 197 return destructorAllocatorFor(bytes).allocate( );198 } 199 200 inline void* MarkedSpace::allocateStructure( )201 { 202 return m_structureAllocator.allocate( );202 return destructorAllocatorFor(bytes).allocate(bytes); 203 } 204 205 inline void* MarkedSpace::allocateStructure(size_t bytes) 206 { 207 return m_structureAllocator.allocate(bytes); 203 208 } 204 209 … … 215 220 } 216 221 222 m_largeAllocator.forEachBlock(functor); 217 223 m_structureAllocator.forEachBlock(functor); 218 224 -
trunk/Source/JavaScriptCore/runtime/Structure.h
r128084 r128141 461 461 heap.globalData()->setInitializingObjectClass(&Structure::s_info); 462 462 #endif 463 JSCell* result = static_cast<JSCell*>(heap.allocateStructure( ));463 JSCell* result = static_cast<JSCell*>(heap.allocateStructure(sizeof(Structure))); 464 464 result->clearStructure(); 465 465 return result;
Note: See TracChangeset for help on using the changeset viewer.