Changeset 195836 in webkit
- Timestamp:
- Jan 29, 2016 12:37:52 PM (8 years ago)
- Location:
- trunk/Source/JavaScriptCore
- Files:
-
- 3 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/ChangeLog
r195831 r195836 1 2016-01-29 Andreas Kling <akling@apple.com> 2 3 Shrink CodeBlock! 4 <https://webkit.org/b/153640> 5 6 Reviewed by Saam Barati. 7 8 Shrink CodeBlock by 112 bytes (from 640 to 528) by employing 9 these sophisticated tricks: 10 11 - Remove members that are not used by anyone. 12 - Don't cache both VM* and Heap* in members. 13 - Reorder members to minimize struct padding. 14 - Use RefCountedArray instead of Vector for arrays that never resize. 15 - Put a not-always-present HashMap in a std::unique_ptr. 16 17 This increases CodeBlock space efficiency by 20%, as we can now 18 fit 30 of them in a MarkedBlock, up from 25. 19 20 * bytecode/CodeBlock.cpp: 21 (JSC::CodeBlock::CodeBlock): 22 (JSC::CodeBlock::finishCreation): 23 (JSC::CodeBlock::setNumParameters): 24 (JSC::CodeBlock::jettison): 25 (JSC::CodeBlock::noticeIncomingCall): 26 (JSC::CodeBlock::resultProfileForBytecodeOffset): 27 * bytecode/CodeBlock.h: 28 (JSC::CodeBlock::setJITCode): 29 (JSC::CodeBlock::capabilityLevelState): 30 (JSC::CodeBlock::codeType): 31 (JSC::CodeBlock::ensureResultProfile): 32 (JSC::CodeBlock::heap): 33 1 34 2016-01-29 Saam barati <sbarati@apple.com> 2 35 -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
r195576 r195836 1741 1741 : JSCell(*vm, structure) 1742 1742 , m_globalObject(other.m_globalObject) 1743 , m_heap(other.m_heap)1744 1743 , m_numCalleeLocals(other.m_numCalleeLocals) 1745 1744 , m_numVars(other.m_numVars) 1746 , m_isConstructor(other.m_isConstructor)1747 1745 , m_shouldAlwaysBeInlined(true) 1746 #if ENABLE(JIT) 1747 , m_capabilityLevelState(DFG::CapabilityLevelNotSet) 1748 #endif 1748 1749 , m_didFailFTLCompilation(false) 1749 1750 , m_hasBeenCompiledWithFTL(false) 1751 , m_isConstructor(other.m_isConstructor) 1752 , m_isStrictMode(other.m_isStrictMode) 1753 , m_needsActivation(other.m_needsActivation) 1754 , m_codeType(other.m_codeType) 1750 1755 , m_unlinkedCode(*other.m_vm, this, other.m_unlinkedCode.get()) 1751 1756 , m_hasDebuggerStatement(false) … … 1758 1763 , m_scopeRegister(other.m_scopeRegister) 1759 1764 , m_lexicalEnvironmentRegister(other.m_lexicalEnvironmentRegister) 1760 , m_isStrictMode(other.m_isStrictMode) 1761 , m_needsActivation(other.m_needsActivation) 1765 , m_hash(other.m_hash) 1762 1766 , m_source(other.m_source) 1763 1767 , m_sourceOffset(other.m_sourceOffset) 1764 1768 , m_firstLineColumnOffset(other.m_firstLineColumnOffset) 1765 , m_codeType(other.m_codeType)1766 1769 , m_constantRegisters(other.m_constantRegisters) 1767 1770 , m_constantsSourceCodeRepresentation(other.m_constantsSourceCodeRepresentation) … … 1772 1775 , m_reoptimizationRetryCounter(0) 1773 1776 , m_creationTime(std::chrono::steady_clock::now()) 1774 , m_hash(other.m_hash)1775 #if ENABLE(JIT)1776 , m_capabilityLevelState(DFG::CapabilityLevelNotSet)1777 #endif1778 1777 { 1779 1778 m_visitWeaklyHasBeenCalled.store(false, std::memory_order_relaxed); 1780 1779 1781 ASSERT( m_heap->isDeferred());1780 ASSERT(heap()->isDeferred()); 1782 1781 ASSERT(m_scopeRegister.isLocal()); 1783 1782 … … 1802 1801 } 1803 1802 1804 m_heap->m_codeBlocks.add(this);1803 heap()->m_codeBlocks.add(this); 1805 1804 } 1806 1805 … … 1809 1808 : JSCell(*vm, structure) 1810 1809 , 
m_globalObject(scope->globalObject()->vm(), this, scope->globalObject()) 1811 , m_heap(&m_globalObject->vm().heap)1812 1810 , m_numCalleeLocals(unlinkedCodeBlock->m_numCalleeLocals) 1813 1811 , m_numVars(unlinkedCodeBlock->m_numVars) 1814 , m_isConstructor(unlinkedCodeBlock->isConstructor())1815 1812 , m_shouldAlwaysBeInlined(true) 1813 #if ENABLE(JIT) 1814 , m_capabilityLevelState(DFG::CapabilityLevelNotSet) 1815 #endif 1816 1816 , m_didFailFTLCompilation(false) 1817 1817 , m_hasBeenCompiledWithFTL(false) 1818 , m_isConstructor(unlinkedCodeBlock->isConstructor()) 1819 , m_isStrictMode(unlinkedCodeBlock->isStrictMode()) 1820 , m_needsActivation(unlinkedCodeBlock->hasActivationRegister() && unlinkedCodeBlock->codeType() == FunctionCode) 1821 , m_codeType(unlinkedCodeBlock->codeType()) 1818 1822 , m_unlinkedCode(m_globalObject->vm(), this, unlinkedCodeBlock) 1819 1823 , m_hasDebuggerStatement(false) … … 1825 1829 , m_scopeRegister(unlinkedCodeBlock->scopeRegister()) 1826 1830 , m_lexicalEnvironmentRegister(unlinkedCodeBlock->activationRegister()) 1827 , m_isStrictMode(unlinkedCodeBlock->isStrictMode())1828 , m_needsActivation(unlinkedCodeBlock->hasActivationRegister() && unlinkedCodeBlock->codeType() == FunctionCode)1829 1831 , m_source(sourceProvider) 1830 1832 , m_sourceOffset(sourceOffset) 1831 1833 , m_firstLineColumnOffset(firstLineColumnOffset) 1832 , m_codeType(unlinkedCodeBlock->codeType())1833 1834 , m_osrExitCounter(0) 1834 1835 , m_optimizationDelayCounter(0) 1835 1836 , m_reoptimizationRetryCounter(0) 1836 1837 , m_creationTime(std::chrono::steady_clock::now()) 1837 #if ENABLE(JIT)1838 , m_capabilityLevelState(DFG::CapabilityLevelNotSet)1839 #endif1840 1838 { 1841 1839 m_visitWeaklyHasBeenCalled.store(false, std::memory_order_relaxed); 1842 1840 1843 ASSERT( m_heap->isDeferred());1841 ASSERT(heap()->isDeferred()); 1844 1842 ASSERT(m_scopeRegister.isLocal()); 1845 1843 … … 1895 1893 } 1896 1894 1897 m_functionDecls 
.resizeToFit(unlinkedCodeBlock->numberOfFunctionDecls());1895 m_functionDecls = RefCountedArray<WriteBarrier<FunctionExecutable>>(unlinkedCodeBlock->numberOfFunctionDecls()); 1898 1896 for (size_t count = unlinkedCodeBlock->numberOfFunctionDecls(), i = 0; i < count; ++i) { 1899 1897 UnlinkedFunctionExecutable* unlinkedExecutable = unlinkedCodeBlock->functionDecl(i); … … 1903 1901 } 1904 1902 1905 m_functionExprs .resizeToFit(unlinkedCodeBlock->numberOfFunctionExprs());1903 m_functionExprs = RefCountedArray<WriteBarrier<FunctionExecutable>>(unlinkedCodeBlock->numberOfFunctionExprs()); 1906 1904 for (size_t count = unlinkedCodeBlock->numberOfFunctionExprs(), i = 0; i < count; ++i) { 1907 1905 UnlinkedFunctionExecutable* unlinkedExecutable = unlinkedCodeBlock->functionExpr(i); … … 1959 1957 // Allocate metadata buffers for the bytecode 1960 1958 if (size_t size = unlinkedCodeBlock->numberOfLLintCallLinkInfos()) 1961 m_llintCallLinkInfos .resizeToFit(size);1959 m_llintCallLinkInfos = RefCountedArray<LLIntCallLinkInfo>(size); 1962 1960 if (size_t size = unlinkedCodeBlock->numberOfArrayProfiles()) 1963 1961 m_arrayProfiles.grow(size); 1964 1962 if (size_t size = unlinkedCodeBlock->numberOfArrayAllocationProfiles()) 1965 m_arrayAllocationProfiles .resizeToFit(size);1963 m_arrayAllocationProfiles = RefCountedArray<ArrayAllocationProfile>(size); 1966 1964 if (size_t size = unlinkedCodeBlock->numberOfValueProfiles()) 1967 m_valueProfiles .resizeToFit(size);1965 m_valueProfiles = RefCountedArray<ValueProfile>(size); 1968 1966 if (size_t size = unlinkedCodeBlock->numberOfObjectAllocationProfiles()) 1969 m_objectAllocationProfiles .resizeToFit(size);1967 m_objectAllocationProfiles = RefCountedArray<ObjectAllocationProfile>(size); 1970 1968 1971 1969 #if ENABLE(JIT) … … 2319 2317 dumpBytecode(); 2320 2318 2321 m_heap->m_codeBlocks.add(this);2322 m_heap->reportExtraMemoryAllocated(m_instructions.size() * sizeof(Instruction));2319 heap()->m_codeBlocks.add(this); 2320 
heap()->reportExtraMemoryAllocated(m_instructions.size() * sizeof(Instruction)); 2323 2321 } 2324 2322 … … 2327 2325 : JSCell(*vm, structure) 2328 2326 , m_globalObject(globalObject->vm(), this, globalObject) 2329 , m_heap(&m_globalObject->vm().heap)2330 2327 , m_numCalleeLocals(0) 2331 2328 , m_numVars(0) 2332 , m_isConstructor(false)2333 2329 , m_shouldAlwaysBeInlined(false) 2330 #if ENABLE(JIT) 2331 , m_capabilityLevelState(DFG::CannotCompile) 2332 #endif 2334 2333 , m_didFailFTLCompilation(false) 2335 2334 , m_hasBeenCompiledWithFTL(false) 2335 , m_isConstructor(false) 2336 , m_isStrictMode(false) 2337 , m_needsActivation(false) 2338 , m_codeType(FunctionCode) 2336 2339 , m_hasDebuggerStatement(false) 2337 2340 , m_steppingMode(SteppingModeDisabled) … … 2339 2342 , m_ownerExecutable(m_globalObject->vm(), this, ownerExecutable) 2340 2343 , m_vm(vm) 2341 , m_isStrictMode(false)2342 , m_needsActivation(false)2343 , m_codeType(FunctionCode)2344 2344 , m_osrExitCounter(0) 2345 2345 , m_optimizationDelayCounter(0) 2346 2346 , m_reoptimizationRetryCounter(0) 2347 2347 , m_creationTime(std::chrono::steady_clock::now()) 2348 #if ENABLE(JIT) 2349 , m_capabilityLevelState(DFG::CannotCompile) 2350 #endif 2351 { 2352 ASSERT(m_heap->isDeferred()); 2348 { 2349 ASSERT(heap()->isDeferred()); 2353 2350 } 2354 2351 … … 2357 2354 Base::finishCreation(vm); 2358 2355 2359 m_heap->m_codeBlocks.add(this);2356 heap()->m_codeBlocks.add(this); 2360 2357 } 2361 2358 #endif … … 2400 2397 m_numParameters = newValue; 2401 2398 2402 m_argumentValueProfiles .resizeToFit(newValue);2399 m_argumentValueProfiles = RefCountedArray<ValueProfile>(newValue); 2403 2400 } 2404 2401 … … 3296 3293 #endif // ENABLE(DFG_JIT) 3297 3294 3298 DeferGCForAWhile deferGC(* m_heap);3295 DeferGCForAWhile deferGC(*heap()); 3299 3296 3300 3297 // We want to accomplish two things here: … … 3422 3419 return; 3423 3420 3424 if (!canInline( m_capabilityLevelState))3421 if (!canInline(capabilityLevelState())) 3425 3422 
return; 3426 3423 … … 3471 3468 } 3472 3469 3473 if (callerCodeBlock-> m_capabilityLevelState== DFG::CapabilityLevelNotSet) {3470 if (callerCodeBlock->capabilityLevelState() == DFG::CapabilityLevelNotSet) { 3474 3471 dataLog("In call from ", *callerCodeBlock, " ", callerFrame->codeOrigin(), " to ", *this, ": caller's DFG capability level is not set.\n"); 3475 3472 CRASH(); 3476 3473 } 3477 3474 3478 if (canCompile(callerCodeBlock-> m_capabilityLevelState))3475 if (canCompile(callerCodeBlock->capabilityLevelState())) 3479 3476 return; 3480 3477 … … 4194 4191 ResultProfile* CodeBlock::resultProfileForBytecodeOffset(int bytecodeOffset) 4195 4192 { 4196 auto iterator = m_bytecodeOffsetToResultProfileIndexMap.find(bytecodeOffset); 4197 if (iterator == m_bytecodeOffsetToResultProfileIndexMap.end()) 4193 if (!m_bytecodeOffsetToResultProfileIndexMap) 4194 return nullptr; 4195 auto iterator = m_bytecodeOffsetToResultProfileIndexMap->find(bytecodeOffset); 4196 if (iterator == m_bytecodeOffsetToResultProfileIndexMap->end()) 4198 4197 return nullptr; 4199 4198 return &m_resultProfiles[iterator->value]; -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.h
r194840 r195836 123 123 124 124 WriteBarrier<JSGlobalObject> m_globalObject; 125 Heap* m_heap;126 125 127 126 public: … … 297 296 void setJITCode(PassRefPtr<JITCode> code) 298 297 { 299 ASSERT( m_heap->isDeferred());300 m_heap->reportExtraMemoryAllocated(code->size());298 ASSERT(heap()->isDeferred()); 299 heap()->reportExtraMemoryAllocated(code->size()); 301 300 ConcurrentJITLocker locker(m_lock); 302 301 WTF::storeStoreFence(); // This is probably not needed because the lock will also do something similar, but it's good to be paranoid. … … 324 323 DFG::CapabilityLevel computeCapabilityLevel(); 325 324 DFG::CapabilityLevel capabilityLevel(); 326 DFG::CapabilityLevel capabilityLevelState() { return m_capabilityLevelState; }325 DFG::CapabilityLevel capabilityLevelState() { return static_cast<DFG::CapabilityLevel>(m_capabilityLevelState); } 327 326 328 327 bool hasOptimizedReplacement(JITCode::JITType typeToReplace); … … 378 377 CodeType codeType() const 379 378 { 380 return m_codeType;379 return static_cast<CodeType>(m_codeType); 381 380 } 382 381 … … 461 460 profile = &m_resultProfiles.last(); 462 461 ASSERT(&m_resultProfiles.last() == &m_resultProfiles[m_resultProfiles.size() - 1]); 463 m_bytecodeOffsetToResultProfileIndexMap.add(bytecodeOffset, m_resultProfiles.size() - 1); 462 if (!m_bytecodeOffsetToResultProfileIndexMap) 463 m_bytecodeOffsetToResultProfileIndexMap = std::make_unique<BytecodeOffsetToResultProfileIndexMap>(); 464 m_bytecodeOffsetToResultProfileIndexMap->add(bytecodeOffset, m_resultProfiles.size() - 1); 464 465 } 465 466 return profile; … … 611 612 } 612 613 613 Heap* heap() const { return m_heap; }614 Heap* heap() const { return &m_vm->heap; } 614 615 JSGlobalObject* globalObject() { return m_globalObject.get(); } 615 616 … … 863 864 // FIXME: Make these remaining members private. 
864 865 865 int m_numLocalRegistersForCalleeSaves;866 866 int m_numCalleeLocals; 867 867 int m_numVars; 868 bool m_isConstructor : 1;869 868 870 869 // This is intentionally public; it's the responsibility of anyone doing any … … 885 884 // concurrent compilation threads finish what they're doing. 886 885 mutable ConcurrentJITLock m_lock; 887 886 887 Atomic<bool> m_visitWeaklyHasBeenCalled; 888 888 889 bool m_shouldAlwaysBeInlined; // Not a bitfield because the JIT wants to store to it. 890 891 #if ENABLE(JIT) 892 unsigned m_capabilityLevelState : 2; // DFG::CapabilityLevel 893 #endif 894 889 895 bool m_allTransitionsHaveBeenMarked : 1; // Initialized and used on every GC. 890 896 891 897 bool m_didFailFTLCompilation : 1; 892 898 bool m_hasBeenCompiledWithFTL : 1; 899 bool m_isConstructor : 1; 900 bool m_isStrictMode : 1; 901 bool m_needsActivation : 1; 902 unsigned m_codeType : 2; // CodeType 893 903 894 904 // Internal methods for use by validation code. It would be private if it wasn't … … 1031 1041 VirtualRegister m_scopeRegister; 1032 1042 VirtualRegister m_lexicalEnvironmentRegister; 1033 1034 bool m_isStrictMode; 1035 bool m_needsActivation; 1036 1037 Atomic<bool> m_visitWeaklyHasBeenCalled; 1043 mutable CodeBlockHash m_hash; 1038 1044 1039 1045 RefPtr<SourceProvider> m_source; 1040 1046 unsigned m_sourceOffset; 1041 1047 unsigned m_firstLineColumnOffset; 1042 CodeType m_codeType; 1043 1044 Vector<LLIntCallLinkInfo> m_llintCallLinkInfos; 1048 1049 RefCountedArray<LLIntCallLinkInfo> m_llintCallLinkInfos; 1045 1050 SentinelLinkedList<LLIntCallLinkInfo, BasicRawSentinelNode<LLIntCallLinkInfo>> m_incomingLLIntCalls; 1046 1051 RefPtr<JITCode> m_jitCode; … … 1060 1065 CompressedLazyOperandValueProfileHolder m_lazyOperandValueProfiles; 1061 1066 #endif 1062 Vector<ValueProfile> m_argumentValueProfiles;1063 Vector<ValueProfile> m_valueProfiles;1067 RefCountedArray<ValueProfile> m_argumentValueProfiles; 1068 RefCountedArray<ValueProfile> m_valueProfiles; 1064 1069 
SegmentedVector<RareCaseProfile, 8> m_rareCaseProfiles; 1065 1070 SegmentedVector<ResultProfile, 8> m_resultProfiles; 1066 HashMap<unsigned, unsigned, IntHash<unsigned>, WTF::UnsignedWithZeroKeyHashTraits<unsigned>> m_bytecodeOffsetToResultProfileIndexMap; 1067 Vector<ArrayAllocationProfile> m_arrayAllocationProfiles; 1071 typedef HashMap<unsigned, unsigned, IntHash<unsigned>, WTF::UnsignedWithZeroKeyHashTraits<unsigned>> BytecodeOffsetToResultProfileIndexMap; 1072 std::unique_ptr<BytecodeOffsetToResultProfileIndexMap> m_bytecodeOffsetToResultProfileIndexMap; 1073 RefCountedArray<ArrayAllocationProfile> m_arrayAllocationProfiles; 1068 1074 ArrayProfileVector m_arrayProfiles; 1069 Vector<ObjectAllocationProfile> m_objectAllocationProfiles;1075 RefCountedArray<ObjectAllocationProfile> m_objectAllocationProfiles; 1070 1076 1071 1077 // Constant Pool … … 1075 1081 Vector<WriteBarrier<Unknown>> m_constantRegisters; 1076 1082 Vector<SourceCodeRepresentation> m_constantsSourceCodeRepresentation; 1077 Vector<WriteBarrier<FunctionExecutable>> m_functionDecls;1078 Vector<WriteBarrier<FunctionExecutable>> m_functionExprs;1083 RefCountedArray<WriteBarrier<FunctionExecutable>> m_functionDecls; 1084 RefCountedArray<WriteBarrier<FunctionExecutable>> m_functionExprs; 1079 1085 1080 1086 WriteBarrier<CodeBlock> m_alternative; … … 1083 1089 1084 1090 BaselineExecutionCounter m_jitExecuteCounter; 1085 int32_t m_totalJITExecutions;1086 1091 uint32_t m_osrExitCounter; 1087 1092 uint16_t m_optimizationDelayCounter; … … 1090 1095 std::chrono::steady_clock::time_point m_creationTime; 1091 1096 1092 mutable CodeBlockHash m_hash;1093 1094 1097 std::unique_ptr<BytecodeLivenessAnalysis> m_livenessAnalysis; 1095 1098 1096 1099 std::unique_ptr<RareData> m_rareData; 1097 #if ENABLE(JIT)1098 DFG::CapabilityLevel m_capabilityLevelState;1099 #endif1100 1100 1101 1101 UnconditionalFinalizer m_unconditionalFinalizer;
Note: See TracChangeset
for help on using the changeset viewer.