Changeset 278029 in webkit
- Timestamp:
- May 25, 2021 11:03:20 AM (3 years ago)
- Location:
- trunk/Source/JavaScriptCore
- Files:
-
- 13 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/ChangeLog
r277984 r278029 1 2021-05-25 Mark Lam <mark.lam@apple.com> 2 3 Reduce Baseline JIT emitted code size for op_jfalse, op_jtrue, op_get_from_scope, op_resolve_scope. 4 https://bugs.webkit.org/show_bug.cgi?id=226107 5 6 Reviewed by Saam Barati. 7 8 Benchmarking with JetStream2 and Speedometer2 on M1 Mac shows that performance is 9 neutral. 10 11 This patch reduces Baseline JIT emitted code size on a run of JetStream2 CLI by 12 another ~6.6M: 13 Base New Diff 14 15 BaselineJIT: 64955116 (61.946026 MB) 57991704 (55.305199 MB) 0.89x (reduction) 16 DFG: 36382012 (34.696590 MB) 36540652 (34.847881 MB) 1.00x 17 Thunk: 23217692 (22.142117 MB) 23115852 (22.044994 MB) 1.00x 18 InlineCache: 22228140 (21.198406 MB) 22262572 (21.231243 MB) 1.00x 19 FTL: 6025320 (5.746193 MB) 6164332 (5.878765 MB) 1.02x 20 Wasm: 2327604 (2.219776 MB) 2297036 (2.190624 MB) 0.99x 21 YarrJIT: 1547172 (1.475498 MB) 1522584 (1.452049 MB) 0.98x 22 CSSJIT: 0 0 23 Uncategorized: 0 0 24 25 Cumulative diff since the start of this effort to reduce Baseline JIT code size: 26 27 Base New Diff 28 29 BaselineJIT: 89089964 (84.962811 MB) 57991704 (55.305199 MB) 0.65x (reduction) 30 DFG: 39117360 (37.305222 MB) 36540652 (34.847881 MB) 0.93x (reduction) 31 Thunk: 23230968 (22.154778 MB) 23115852 (22.044994 MB) 1.00x 32 InlineCache: 22027416 (21.006981 MB) 22262572 (21.231243 MB) 1.01x 33 FTL: 6575772 (6.271145 MB) 6164332 (5.878765 MB) 0.94x (reduction) 34 Wasm: 2302724 (2.196049 MB) 2297036 (2.190624 MB) 1.00x 35 YarrJIT: 1538956 (1.467663 MB) 1522584 (1.452049 MB) 0.99x 36 CSSJIT: 0 0 37 Uncategorized: 0 0 38 39 * bytecode/CodeBlock.h: 40 (JSC::CodeBlock::offsetInMetadataTable): 41 (JSC::CodeBlock::offsetOfMetadataTable): 42 * jit/AssemblyHelpers.cpp: 43 (JSC::AssemblyHelpers::branchIfValue): 44 * jit/AssemblyHelpers.h: 45 (JSC::AssemblyHelpers::branchIfTruthy): 46 (JSC::AssemblyHelpers::branchIfFalsey): 47 * jit/JIT.cpp: 48 (JSC::JIT::privateCompileSlowCases): 49 * jit/JIT.h: 50 * jit/JITOpcodes.cpp: 51 
(JSC::JIT::emit_op_jfalse): 52 (JSC::JIT::valueIsFalseyGenerator): 53 (JSC::JIT::emit_op_jtrue): 54 (JSC::JIT::valueIsTruthyGenerator): 55 * jit/JITOperations.cpp: 56 (JSC::JSC_DEFINE_JIT_OPERATION): 57 * jit/JITOperations.h: 58 * jit/JITPropertyAccess.cpp: 59 (JSC::JIT::emit_op_resolve_scope): 60 (JSC::JIT::generateOpResolveScopeThunk): 61 (JSC::JIT::slow_op_resolve_scopeGenerator): 62 (JSC::JIT::emitSlow_op_get_from_scope): 63 (JSC::JIT::emit_op_get_from_scope): 64 (JSC::JIT::generateOpGetFromScopeThunk): 65 (JSC::JIT::slow_op_get_from_scopeGenerator): 66 * jit/ThunkGenerators.cpp: 67 (JSC::popThunkStackPreservesAndHandleExceptionGenerator): 68 * runtime/GetPutInfo.h: 69 * runtime/JSGlobalObject.h: 70 (JSC::JSGlobalObject::offsetOfGlobalLexicalEnvironment): 71 (JSC::JSGlobalObject::offsetOfGlobalLexicalBindingEpoch): 72 1 73 2021-05-24 Robin Morisset <rmorisset@apple.com> 2 74 -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.h
r277963 r278029 891 891 } 892 892 893 template<typename Metadata> 894 ptrdiff_t offsetInMetadataTable(Metadata* metadata) 895 { 896 return bitwise_cast<uint8_t*>(metadata) - bitwise_cast<uint8_t*>(metadataTable()); 897 } 898 893 899 size_t metadataSizeInBytes() 894 900 { … … 899 905 const void* instructionsRawPointer() { return m_instructionsRawPointer; } 900 906 907 static ptrdiff_t offsetOfMetadataTable() { return OBJECT_OFFSETOF(CodeBlock, m_metadata); } 901 908 static ptrdiff_t offsetOfInstructionsRawPointer() { return OBJECT_OFFSETOF(CodeBlock, m_instructionsRawPointer); } 902 909 -
trunk/Source/JavaScriptCore/jit/AssemblyHelpers.cpp
r277850 r278029 798 798 } 799 799 800 AssemblyHelpers::JumpList AssemblyHelpers::branchIfValue(VM& vm, JSValueRegs value, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg valueAsFPR, FPRReg tempFPR, bool shouldCheckMasqueradesAsUndefined, JSGlobalObject*globalObject, bool invert)800 AssemblyHelpers::JumpList AssemblyHelpers::branchIfValue(VM& vm, JSValueRegs value, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg valueAsFPR, FPRReg tempFPR, bool shouldCheckMasqueradesAsUndefined, Variant<JSGlobalObject*, GPRReg> globalObject, bool invert) 801 801 { 802 802 // Implements the following control flow structure: … … 830 830 isNotMasqueradesAsUndefined.append(branchTest8(Zero, Address(value.payloadGPR(), JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined))); 831 831 emitLoadStructure(vm, value.payloadGPR(), scratch, scratchIfShouldCheckMasqueradesAsUndefined); 832 move(TrustedImmPtr(globalObject), scratchIfShouldCheckMasqueradesAsUndefined); 832 if (WTF::holds_alternative<JSGlobalObject*>(globalObject)) 833 move(TrustedImmPtr(WTF::get<JSGlobalObject*>(globalObject)), scratchIfShouldCheckMasqueradesAsUndefined); 834 else 835 move(WTF::get<GPRReg>(globalObject), scratchIfShouldCheckMasqueradesAsUndefined); 833 836 isNotMasqueradesAsUndefined.append(branchPtr(NotEqual, Address(scratch, Structure::globalObjectOffset()), scratchIfShouldCheckMasqueradesAsUndefined)); 834 837 -
trunk/Source/JavaScriptCore/jit/AssemblyHelpers.h
r277850 r278029 45 45 #include "TypeofType.h" 46 46 #include "VM.h" 47 #include <wtf/Variant.h> 47 48 48 49 namespace JSC { … … 1897 1898 } 1898 1899 1899 JumpList branchIfValue(VM&, JSValueRegs, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg, FPRReg, bool shouldCheckMasqueradesAsUndefined, JSGlobalObject*, bool negateResult);1900 JumpList branchIfTruthy(VM& vm, JSValueRegs value, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg scratchFPR0, FPRReg scratchFPR1, bool shouldCheckMasqueradesAsUndefined, JSGlobalObject*globalObject)1900 JumpList branchIfValue(VM&, JSValueRegs, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg, FPRReg, bool shouldCheckMasqueradesAsUndefined, Variant<JSGlobalObject*, GPRReg>, bool negateResult); 1901 JumpList branchIfTruthy(VM& vm, JSValueRegs value, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg scratchFPR0, FPRReg scratchFPR1, bool shouldCheckMasqueradesAsUndefined, Variant<JSGlobalObject*, GPRReg> globalObject) 1901 1902 { 1902 1903 return branchIfValue(vm, value, scratch, scratchIfShouldCheckMasqueradesAsUndefined, scratchFPR0, scratchFPR1, shouldCheckMasqueradesAsUndefined, globalObject, false); 1903 1904 } 1904 JumpList branchIfFalsey(VM& vm, JSValueRegs value, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg scratchFPR0, FPRReg scratchFPR1, bool shouldCheckMasqueradesAsUndefined, JSGlobalObject*globalObject)1905 JumpList branchIfFalsey(VM& vm, JSValueRegs value, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg scratchFPR0, FPRReg scratchFPR1, bool shouldCheckMasqueradesAsUndefined, Variant<JSGlobalObject*, GPRReg> globalObject) 1905 1906 { 1906 1907 return branchIfValue(vm, value, scratch, scratchIfShouldCheckMasqueradesAsUndefined, scratchFPR0, scratchFPR1, shouldCheckMasqueradesAsUndefined, globalObject, true); -
trunk/Source/JavaScriptCore/jit/JIT.cpp
r277936 r278029 590 590 DEFINE_SLOWCASE_OP(op_sub) 591 591 DEFINE_SLOWCASE_OP(op_has_enumerable_indexed_property) 592 #if !ENABLE(EXTRA_CTI_THUNKS) 592 593 DEFINE_SLOWCASE_OP(op_get_from_scope) 594 #endif 593 595 DEFINE_SLOWCASE_OP(op_put_to_scope) 594 596 … … 625 627 DEFINE_SLOWCASE_SLOW_OP(has_own_structure_property) 626 628 DEFINE_SLOWCASE_SLOW_OP(in_structure_property) 629 #if !ENABLE(EXTRA_CTI_THUNKS) 627 630 DEFINE_SLOWCASE_SLOW_OP(resolve_scope) 631 #endif 628 632 DEFINE_SLOWCASE_SLOW_OP(check_tdz) 629 633 DEFINE_SLOWCASE_SLOW_OP(to_property_key) -
trunk/Source/JavaScriptCore/jit/JIT.h
r277902 r278029 731 731 void emit_op_get_from_arguments(const Instruction*); 732 732 void emit_op_put_to_arguments(const Instruction*); 733 #if !ENABLE(EXTRA_CTI_THUNKS) 733 734 void emitSlow_op_get_from_scope(const Instruction*, Vector<SlowCaseEntry>::iterator&); 735 #endif 734 736 void emitSlow_op_put_to_scope(const Instruction*, Vector<SlowCaseEntry>::iterator&); 735 737 … … 797 799 static MacroAssemblerCodeRef<JITThunkPtrTag> slow_op_put_private_name_prepareCallGenerator(VM&); 798 800 static MacroAssemblerCodeRef<JITThunkPtrTag> slow_op_put_to_scopeGenerator(VM&); 801 static MacroAssemblerCodeRef<JITThunkPtrTag> slow_op_resolve_scopeGenerator(VM&); 799 802 800 803 static MacroAssemblerCodeRef<JITThunkPtrTag> op_check_traps_handlerGenerator(VM&); … … 802 805 static MacroAssemblerCodeRef<JITThunkPtrTag> op_ret_handlerGenerator(VM&); 803 806 static MacroAssemblerCodeRef<JITThunkPtrTag> op_throw_handlerGenerator(VM&); 804 #endif 807 808 static constexpr bool thunkIsUsedForOpGetFromScope(ResolveType resolveType) 809 { 810 // GlobalVar because it is more efficient to emit inline than use a thunk. 811 // LocalClosureVar and ModuleVar because we don't use these types with op_get_from_scope. 812 return !(resolveType == GlobalVar || resolveType == LocalClosureVar || resolveType == ModuleVar); 813 } 814 815 #define DECLARE_GET_FROM_SCOPE_GENERATOR(resolveType) \ 816 static MacroAssemblerCodeRef<JITThunkPtrTag> op_get_from_scope_##resolveType##Generator(VM&); 817 FOR_EACH_RESOLVE_TYPE(DECLARE_GET_FROM_SCOPE_GENERATOR) 818 #undef DECLARE_GET_FROM_SCOPE_GENERATOR 819 820 MacroAssemblerCodeRef<JITThunkPtrTag> generateOpGetFromScopeThunk(ResolveType, const char* thunkName); 821 822 static constexpr bool thunkIsUsedForOpResolveScope(ResolveType resolveType) 823 { 824 // ModuleVar because it is more efficient to emit inline than use a thunk. 825 // LocalClosureVar because we don't use these types with op_resolve_scope. 
826 return !(resolveType == LocalClosureVar || resolveType == ModuleVar); 827 } 828 829 #define DECLARE_RESOLVE_SCOPE_GENERATOR(resolveType) \ 830 static MacroAssemblerCodeRef<JITThunkPtrTag> op_resolve_scope_##resolveType##Generator(VM&); 831 FOR_EACH_RESOLVE_TYPE(DECLARE_RESOLVE_SCOPE_GENERATOR) 832 #undef DECLARE_RESOLVE_SCOPE_GENERATOR 833 834 MacroAssemblerCodeRef<JITThunkPtrTag> generateOpResolveScopeThunk(ResolveType, const char* thunkName); 835 836 static MacroAssemblerCodeRef<JITThunkPtrTag> valueIsFalseyGenerator(VM&); 837 static MacroAssemblerCodeRef<JITThunkPtrTag> valueIsTruthyGenerator(VM&); 838 839 #endif // ENABLE(EXTRA_CTI_THUNKS) 805 840 806 841 Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter) -
trunk/Source/JavaScriptCore/jit/JITOpcodes.cpp
r277974 r278029 448 448 unsigned target = jumpTarget(currentInstruction, bytecode.m_targetLabel); 449 449 450 GPRReg value = regT0; 451 GPRReg scratch1 = regT1; 452 GPRReg scratch2 = regT2; 453 bool shouldCheckMasqueradesAsUndefined = true; 450 constexpr GPRReg value = regT0; 454 451 455 452 emitGetVirtualRegister(bytecode.m_condition, value); 453 #if !ENABLE(EXTRA_CTI_THUNKS) 454 constexpr GPRReg scratch1 = regT1; 455 constexpr GPRReg scratch2 = regT2; 456 constexpr bool shouldCheckMasqueradesAsUndefined = true; 456 457 addJump(branchIfFalsey(vm(), JSValueRegs(value), scratch1, scratch2, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject()), target); 457 } 458 #else 459 emitNakedNearCall(vm().getCTIStub(valueIsFalseyGenerator).retaggedCode<NoPtrTag>()); 460 addJump(branchTest32(NonZero, regT0), target); 461 #endif 462 } 463 464 #if ENABLE(EXTRA_CTI_THUNKS) 465 MacroAssemblerCodeRef<JITThunkPtrTag> JIT::valueIsFalseyGenerator(VM& vm) 466 { 467 // The thunk generated by this function can only work with the LLInt / Baseline JIT because 468 // it makes assumptions about the right globalObject being available from CallFrame::codeBlock(). 469 // DFG/FTL may inline functions belonging to other globalObjects, which may not match 470 // CallFrame::codeBlock(). 
471 JIT jit(vm); 472 473 constexpr GPRReg value = regT0; 474 constexpr GPRReg scratch1 = regT1; 475 constexpr GPRReg scratch2 = regT2; 476 constexpr bool shouldCheckMasqueradesAsUndefined = true; 477 478 jit.tagReturnAddress(); 479 480 constexpr GPRReg globalObjectGPR = regT3; 481 jit.loadPtr(addressFor(CallFrameSlot::codeBlock), globalObjectGPR); 482 jit.loadPtr(Address(globalObjectGPR, CodeBlock::offsetOfGlobalObject()), globalObjectGPR); 483 auto isFalsey = jit.branchIfFalsey(vm, JSValueRegs(value), scratch1, scratch2, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, globalObjectGPR); 484 jit.move(TrustedImm32(0), regT0); 485 Jump done = jit.jump(); 486 487 isFalsey.link(&jit); 488 jit.move(TrustedImm32(1), regT0); 489 490 done.link(&jit); 491 jit.ret(); 492 493 LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, LinkBuffer::Profile::Thunk); 494 return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Baseline: valueIsfalsey"); 495 } 496 #endif // ENABLE(EXTRA_CTI_THUNKS) 458 497 459 498 void JIT::emit_op_jeq_null(const Instruction* currentInstruction) … … 569 608 unsigned target = jumpTarget(currentInstruction, bytecode.m_targetLabel); 570 609 571 GPRReg value = regT0; 572 GPRReg scratch1 = regT1; 573 GPRReg scratch2 = regT2; 574 bool shouldCheckMasqueradesAsUndefined = true; 610 constexpr GPRReg value = regT0; 611 575 612 emitGetVirtualRegister(bytecode.m_condition, value); 613 #if !ENABLE(EXTRA_CTI_THUNKS) 614 constexpr GPRReg scratch1 = regT1; 615 constexpr GPRReg scratch2 = regT2; 616 constexpr bool shouldCheckMasqueradesAsUndefined = true; 576 617 addJump(branchIfTruthy(vm(), JSValueRegs(value), scratch1, scratch2, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject()), target); 577 } 618 #else 619 emitNakedNearCall(vm().getCTIStub(valueIsTruthyGenerator).retaggedCode<NoPtrTag>()); 620 addJump(branchTest32(NonZero, regT0), target); 621 #endif 622 } 623 624 #if ENABLE(EXTRA_CTI_THUNKS) 625 MacroAssemblerCodeRef<JITThunkPtrTag> 
JIT::valueIsTruthyGenerator(VM& vm) 626 { 627 // The thunk generated by this function can only work with the LLInt / Baseline JIT because 628 // it makes assumptions about the right globalObject being available from CallFrame::codeBlock(). 629 // DFG/FTL may inline functions belonging to other globalObjects, which may not match 630 // CallFrame::codeBlock(). 631 JIT jit(vm); 632 633 constexpr GPRReg value = regT0; 634 constexpr GPRReg scratch1 = regT1; 635 constexpr GPRReg scratch2 = regT2; 636 constexpr bool shouldCheckMasqueradesAsUndefined = true; 637 638 jit.tagReturnAddress(); 639 640 constexpr GPRReg globalObjectGPR = regT3; 641 jit.loadPtr(addressFor(CallFrameSlot::codeBlock), globalObjectGPR); 642 jit.loadPtr(Address(globalObjectGPR, CodeBlock::offsetOfGlobalObject()), globalObjectGPR); 643 auto isTruthy = jit.branchIfTruthy(vm, JSValueRegs(value), scratch1, scratch2, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, globalObjectGPR); 644 jit.move(TrustedImm32(0), regT0); 645 Jump done = jit.jump(); 646 647 isTruthy.link(&jit); 648 jit.move(TrustedImm32(1), regT0); 649 650 done.link(&jit); 651 jit.ret(); 652 653 LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, LinkBuffer::Profile::Thunk); 654 return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Baseline: valueIsTruthy"); 655 } 656 #endif // ENABLE(EXTRA_CTI_THUNKS) 578 657 579 658 void JIT::emit_op_neq(const Instruction* currentInstruction) -
trunk/Source/JavaScriptCore/jit/JITOperations.cpp
r277926 r278029 2899 2899 } 2900 2900 2901 #if ENABLE(EXTRA_CTI_THUNKS) 2902 JSC_DEFINE_JIT_OPERATION(operationResolveScopeForBaseline, EncodedJSValue, (JSGlobalObject* globalObject, const Instruction* pc)) 2903 { 2904 VM& vm = globalObject->vm(); 2905 CallFrame* callFrame = DECLARE_CALL_FRAME(vm); 2906 JITOperationPrologueCallFrameTracer tracer(vm, callFrame); 2907 auto throwScope = DECLARE_THROW_SCOPE(vm); 2908 2909 CodeBlock* codeBlock = callFrame->codeBlock(); 2910 2911 auto bytecode = pc->as<OpResolveScope>(); 2912 const Identifier& ident = codeBlock->identifier(bytecode.m_var); 2913 JSScope* scope = callFrame->uncheckedR(bytecode.m_scope).Register::scope(); 2914 JSObject* resolvedScope = JSScope::resolve(globalObject, scope, ident); 2915 // Proxy can throw an error here, e.g. Proxy in with statement's @unscopables. 2916 RETURN_IF_EXCEPTION(throwScope, { }); 2917 2918 auto& metadata = bytecode.metadata(codeBlock); 2919 ResolveType resolveType = metadata.m_resolveType; 2920 2921 // ModuleVar does not keep the scope register value alive in DFG. 2922 ASSERT(resolveType != ModuleVar); 2923 2924 switch (resolveType) { 2925 case GlobalProperty: 2926 case GlobalPropertyWithVarInjectionChecks: 2927 case UnresolvedProperty: 2928 case UnresolvedPropertyWithVarInjectionChecks: { 2929 if (resolvedScope->isGlobalObject()) { 2930 JSGlobalObject* globalObject = jsCast<JSGlobalObject*>(resolvedScope); 2931 bool hasProperty = globalObject->hasProperty(globalObject, ident); 2932 RETURN_IF_EXCEPTION(throwScope, { }); 2933 if (hasProperty) { 2934 ConcurrentJSLocker locker(codeBlock->m_lock); 2935 metadata.m_resolveType = needsVarInjectionChecks(resolveType) ? 
GlobalPropertyWithVarInjectionChecks : GlobalProperty; 2936 metadata.m_globalObject.set(vm, codeBlock, globalObject); 2937 metadata.m_globalLexicalBindingEpoch = globalObject->globalLexicalBindingEpoch(); 2938 } 2939 } else if (resolvedScope->isGlobalLexicalEnvironment()) { 2940 JSGlobalLexicalEnvironment* globalLexicalEnvironment = jsCast<JSGlobalLexicalEnvironment*>(resolvedScope); 2941 ConcurrentJSLocker locker(codeBlock->m_lock); 2942 metadata.m_resolveType = needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar; 2943 metadata.m_globalLexicalEnvironment.set(vm, codeBlock, globalLexicalEnvironment); 2944 } 2945 break; 2946 } 2947 default: 2948 break; 2949 } 2950 2951 return JSValue::encode(resolvedScope); 2952 } 2953 #endif 2954 2901 2955 JSC_DEFINE_JIT_OPERATION(operationGetFromScope, EncodedJSValue, (JSGlobalObject* globalObject, const Instruction* pc)) 2902 2956 { -
trunk/Source/JavaScriptCore/jit/JITOperations.h
r277926 r278029 286 286 JSC_DECLARE_JIT_OPERATION(operationSwitchImmWithUnknownKeyType, char*, (VM*, EncodedJSValue key, size_t tableIndex, int32_t min)); 287 287 JSC_DECLARE_JIT_OPERATION(operationSwitchStringWithUnknownKeyType, char*, (JSGlobalObject*, EncodedJSValue key, size_t tableIndex)); 288 #if ENABLE(EXTRA_CTI_THUNKS) 289 JSC_DECLARE_JIT_OPERATION(operationResolveScopeForBaseline, EncodedJSValue, (JSGlobalObject*, const Instruction* bytecodePC)); 290 #endif 288 291 JSC_DECLARE_JIT_OPERATION(operationGetFromScope, EncodedJSValue, (JSGlobalObject*, const Instruction* bytecodePC)); 289 292 JSC_DECLARE_JIT_OPERATION(operationPutToScope, void, (JSGlobalObject*, const Instruction* bytecodePC)); -
trunk/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
r277974 r278029 1590 1590 } 1591 1591 1592 #if !ENABLE(EXTRA_CTI_THUNKS) 1592 1593 void JIT::emit_op_resolve_scope(const Instruction* currentInstruction) 1593 1594 { … … 1693 1694 } 1694 1695 } 1696 #else // ENABLE(EXTRA_CTI_THUNKS) 1697 1698 void JIT::emit_op_resolve_scope(const Instruction* currentInstruction) 1699 { 1700 auto bytecode = currentInstruction->as<OpResolveScope>(); 1701 auto& metadata = bytecode.metadata(m_codeBlock); 1702 VirtualRegister dst = bytecode.m_dst; 1703 VirtualRegister scope = bytecode.m_scope; 1704 ResolveType resolveType = metadata.m_resolveType; 1705 1706 VM& vm = this->vm(); 1707 uint32_t bytecodeOffset = m_bytecodeIndex.offset(); 1708 ASSERT(BytecodeIndex(bytecodeOffset) == m_bytecodeIndex); 1709 ASSERT(m_codeBlock->instructionAt(m_bytecodeIndex) == currentInstruction); 1710 1711 constexpr GPRReg metadataGPR = regT7; 1712 constexpr GPRReg scopeGPR = regT6; 1713 constexpr GPRReg bytecodeOffsetGPR = regT5; 1714 1715 if (resolveType == ModuleVar) 1716 move(TrustedImmPtr(metadata.m_lexicalEnvironment.get()), regT0); 1717 else { 1718 ptrdiff_t metadataOffset = m_codeBlock->offsetInMetadataTable(&metadata); 1719 1720 #define RESOLVE_SCOPE_GENERATOR(resolveType) op_resolve_scope_##resolveType##Generator, 1721 static const ThunkGenerator generators[] = { 1722 FOR_EACH_RESOLVE_TYPE(RESOLVE_SCOPE_GENERATOR) 1723 }; 1724 #undef RESOLVE_SCOPE_GENERATOR 1725 1726 emitGetVirtualRegister(scope, scopeGPR); 1727 move(TrustedImmPtr(metadataOffset), metadataGPR); 1728 move(TrustedImm32(bytecodeOffset), bytecodeOffsetGPR); 1729 emitNakedNearCall(vm.getCTIStub(generators[resolveType]).retaggedCode<NoPtrTag>()); 1730 } 1731 1732 emitPutVirtualRegister(dst); 1733 } 1734 1735 MacroAssemblerCodeRef<JITThunkPtrTag> JIT::generateOpResolveScopeThunk(ResolveType resolveType, const char* thunkName) 1736 { 1737 // The thunk generated by this function can only work with the LLInt / Baseline JIT because 1738 // it makes assumptions about the right globalObject 
being available from CallFrame::codeBlock(). 1739 // DFG/FTL may inline functions belonging to other globalObjects, which may not match 1740 // CallFrame::codeBlock(). 1741 using Metadata = OpResolveScope::Metadata; 1742 constexpr GPRReg metadataGPR = regT7; // incoming 1743 constexpr GPRReg scopeGPR = regT6; // incoming 1744 constexpr GPRReg bytecodeOffsetGPR = regT5; // incoming - pass thru to slow path. 1745 constexpr GPRReg globalObjectGPR = regT4; 1746 UNUSED_PARAM(bytecodeOffsetGPR); 1747 RELEASE_ASSERT(thunkIsUsedForOpResolveScope(resolveType)); 1748 1749 tagReturnAddress(); 1750 1751 loadPtr(addressFor(CallFrameSlot::codeBlock), regT3); 1752 loadPtr(Address(regT3, CodeBlock::offsetOfMetadataTable()), regT3); 1753 addPtr(regT3, metadataGPR); 1754 1755 JumpList slowCase; 1756 1757 auto emitVarInjectionCheck = [&] (bool needsVarInjectionChecks, GPRReg globalObjectGPR = InvalidGPRReg) { 1758 if (!needsVarInjectionChecks) 1759 return; 1760 if (globalObjectGPR == InvalidGPRReg) { 1761 globalObjectGPR = regT4; 1762 loadPtr(addressFor(CallFrameSlot::codeBlock), regT3); 1763 loadPtr(Address(regT3, CodeBlock::offsetOfGlobalObject()), globalObjectGPR); 1764 } 1765 loadPtr(Address(globalObjectGPR, OBJECT_OFFSETOF(JSGlobalObject, m_varInjectionWatchpoint)), regT3); 1766 slowCase.append(branch8(Equal, Address(regT3, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated))); 1767 }; 1768 1769 auto emitResolveClosure = [&] (bool needsVarInjectionChecks) { 1770 emitVarInjectionCheck(needsVarInjectionChecks); 1771 move(scopeGPR, regT0); 1772 load32(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_localScopeDepth)), regT1); 1773 1774 Label loop = label(); 1775 Jump done = branchTest32(Zero, regT1); 1776 { 1777 loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0); 1778 sub32(TrustedImm32(1), regT1); 1779 jump().linkTo(loop, this); 1780 } 1781 done.link(this); 1782 }; 1783 1784 auto emitCode = [&] (ResolveType resolveType) { 1785 switch (resolveType) { 1786 case 
GlobalProperty: 1787 case GlobalPropertyWithVarInjectionChecks: { 1788 // JSScope::constantScopeForCodeBlock() loads codeBlock->globalObject(). 1789 loadPtr(addressFor(CallFrameSlot::codeBlock), regT3); 1790 loadPtr(Address(regT3, CodeBlock::offsetOfGlobalObject()), globalObjectGPR); 1791 emitVarInjectionCheck(needsVarInjectionChecks(resolveType), globalObjectGPR); 1792 load32(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_globalLexicalBindingEpoch)), regT1); 1793 slowCase.append(branch32(NotEqual, Address(globalObjectGPR, JSGlobalObject::offsetOfGlobalLexicalBindingEpoch()), regT1)); 1794 move(globalObjectGPR, regT0); 1795 break; 1796 } 1797 1798 case GlobalVar: 1799 case GlobalVarWithVarInjectionChecks: 1800 case GlobalLexicalVar: 1801 case GlobalLexicalVarWithVarInjectionChecks: { 1802 // JSScope::constantScopeForCodeBlock() loads codeBlock->globalObject() for GlobalVar*, 1803 // and codeBlock->globalObject()->globalLexicalEnvironment() for GlobalLexicalVar*. 1804 loadPtr(addressFor(CallFrameSlot::codeBlock), regT3); 1805 loadPtr(Address(regT3, CodeBlock::offsetOfGlobalObject()), regT0); 1806 emitVarInjectionCheck(needsVarInjectionChecks(resolveType), regT0); 1807 if (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks) 1808 loadPtr(Address(regT0, JSGlobalObject::offsetOfGlobalLexicalEnvironment()), regT0); 1809 break; 1810 } 1811 case ClosureVar: 1812 case ClosureVarWithVarInjectionChecks: 1813 emitResolveClosure(needsVarInjectionChecks(resolveType)); 1814 break; 1815 case Dynamic: 1816 slowCase.append(jump()); 1817 break; 1818 case LocalClosureVar: 1819 case ModuleVar: 1820 case UnresolvedProperty: 1821 case UnresolvedPropertyWithVarInjectionChecks: 1822 RELEASE_ASSERT_NOT_REACHED(); 1823 } 1824 }; 1825 1826 switch (resolveType) { 1827 case GlobalProperty: 1828 case GlobalPropertyWithVarInjectionChecks: { 1829 JumpList skipToEnd; 1830 load32(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_resolveType)), regT0); 1831 1832 
Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(resolveType)); 1833 emitCode(resolveType); 1834 skipToEnd.append(jump()); 1835 1836 notGlobalProperty.link(this); 1837 emitCode(needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar); 1838 1839 skipToEnd.link(this); 1840 break; 1841 } 1842 case UnresolvedProperty: 1843 case UnresolvedPropertyWithVarInjectionChecks: { 1844 JumpList skipToEnd; 1845 load32(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_resolveType)), regT0); 1846 1847 Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(GlobalProperty)); 1848 emitCode(GlobalProperty); 1849 skipToEnd.append(jump()); 1850 notGlobalProperty.link(this); 1851 1852 Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks)); 1853 emitCode(GlobalPropertyWithVarInjectionChecks); 1854 skipToEnd.append(jump()); 1855 notGlobalPropertyWithVarInjections.link(this); 1856 1857 Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar)); 1858 emitCode(GlobalLexicalVar); 1859 skipToEnd.append(jump()); 1860 notGlobalLexicalVar.link(this); 1861 1862 Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks)); 1863 emitCode(GlobalLexicalVarWithVarInjectionChecks); 1864 skipToEnd.append(jump()); 1865 notGlobalLexicalVarWithVarInjections.link(this); 1866 1867 slowCase.append(jump()); 1868 skipToEnd.link(this); 1869 break; 1870 } 1871 1872 default: 1873 emitCode(resolveType); 1874 break; 1875 } 1876 1877 ret(); 1878 1879 LinkBuffer patchBuffer(*this, GLOBAL_THUNK_ID, LinkBuffer::Profile::Thunk); 1880 auto slowCaseHandler = vm().getCTIStub(slow_op_resolve_scopeGenerator); 1881 patchBuffer.link(slowCase, CodeLocationLabel(slowCaseHandler.retaggedCode<NoPtrTag>())); 1882 return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, thunkName); 1883 } 1884 1885 #define 
DEFINE_RESOLVE_SCOPE_GENERATOR(resolveType) \ 1886 MacroAssemblerCodeRef<JITThunkPtrTag> JIT::op_resolve_scope_##resolveType##Generator(VM& vm) \ 1887 { \ 1888 if constexpr (!thunkIsUsedForOpResolveScope(resolveType)) \ 1889 return { }; \ 1890 JIT jit(vm); \ 1891 return jit.generateOpResolveScopeThunk(resolveType, "Baseline: op_resolve_scope_" #resolveType); \ 1892 } 1893 FOR_EACH_RESOLVE_TYPE(DEFINE_RESOLVE_SCOPE_GENERATOR) 1894 #undef DEFINE_RESOLVE_SCOPE_GENERATOR 1895 1896 MacroAssemblerCodeRef<JITThunkPtrTag> JIT::slow_op_resolve_scopeGenerator(VM& vm) 1897 { 1898 // The thunk generated by this function can only work with the LLInt / Baseline JIT because 1899 // it makes assumptions about the right globalObject being available from CallFrame::codeBlock(). 1900 // DFG/FTL may inline functions belonging to other globalObjects, which may not match 1901 // CallFrame::codeBlock(). 1902 JIT jit(vm); 1903 1904 // The fast path already pushed the return address. 1905 #if CPU(X86_64) 1906 jit.push(X86Registers::ebp); 1907 #elif CPU(ARM64) 1908 jit.pushPair(framePointerRegister, linkRegister); 1909 #endif 1910 1911 constexpr GPRReg bytecodeOffsetGPR = regT5; 1912 jit.store32(bytecodeOffsetGPR, tagFor(CallFrameSlot::argumentCountIncludingThis)); 1913 1914 constexpr GPRReg codeBlockGPR = argumentGPR3; 1915 constexpr GPRReg globalObjectGPR = argumentGPR0; 1916 constexpr GPRReg instructionGPR = argumentGPR1; 1917 1918 jit.loadPtr(addressFor(CallFrameSlot::codeBlock), codeBlockGPR); 1919 jit.loadPtr(Address(codeBlockGPR, CodeBlock::offsetOfGlobalObject()), globalObjectGPR); 1920 jit.loadPtr(Address(codeBlockGPR, CodeBlock::offsetOfInstructionsRawPointer()), instructionGPR); 1921 jit.addPtr(bytecodeOffsetGPR, instructionGPR); 1922 1923 jit.setupArguments<decltype(operationResolveScopeForBaseline)>(globalObjectGPR, instructionGPR); 1924 jit.prepareCallOperation(vm); 1925 Call operation = jit.call(OperationPtrTag); 1926 Jump exceptionCheck = 
jit.emitNonPatchableExceptionCheck(vm); 1927 1928 #if CPU(X86_64) 1929 jit.pop(X86Registers::ebp); 1930 #elif CPU(ARM64) 1931 jit.popPair(CCallHelpers::framePointerRegister, CCallHelpers::linkRegister); 1932 #endif 1933 jit.ret(); 1934 1935 LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, LinkBuffer::Profile::Thunk); 1936 patchBuffer.link(operation, FunctionPtr<OperationPtrTag>(operationResolveScopeForBaseline)); 1937 auto handler = vm.getCTIStub(popThunkStackPreservesAndHandleExceptionGenerator); 1938 patchBuffer.link(exceptionCheck, CodeLocationLabel(handler.retaggedCode<NoPtrTag>())); 1939 return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Baseline: slow_op_resolve_scope"); 1940 } 1941 #endif // ENABLE(EXTRA_CTI_THUNKS) 1695 1942 1696 1943 void JIT::emitLoadWithStructureCheck(VirtualRegister scope, Structure** structureSlot) … … 1720 1967 } 1721 1968 1969 #if !ENABLE(EXTRA_CTI_THUNKS) 1722 1970 void JIT::emit_op_get_from_scope(const Instruction* currentInstruction) 1723 1971 { … … 1845 2093 auto& metadata = bytecode.metadata(m_codeBlock); 1846 2094 VirtualRegister dst = bytecode.m_dst; 1847 1848 #if !ENABLE(EXTRA_CTI_THUNKS)1849 2095 callOperationWithProfile(metadata, operationGetFromScope, dst, TrustedImmPtr(m_codeBlock->globalObject()), currentInstruction); 1850 #else 2096 } 2097 2098 #else // ENABLE(EXTRA_CTI_THUNKS) 2099 2100 void JIT::emit_op_get_from_scope(const Instruction* currentInstruction) 2101 { 2102 auto bytecode = currentInstruction->as<OpGetFromScope>(); 2103 auto& metadata = bytecode.metadata(m_codeBlock); 2104 VirtualRegister dst = bytecode.m_dst; 2105 VirtualRegister scope = bytecode.m_scope; 2106 ResolveType resolveType = metadata.m_getPutInfo.resolveType(); 2107 1851 2108 VM& vm = this->vm(); 1852 2109 uint32_t bytecodeOffset = m_bytecodeIndex.offset(); … … 1854 2111 ASSERT(m_codeBlock->instructionAt(m_bytecodeIndex) == currentInstruction); 1855 2112 1856 constexpr GPRReg bytecodeOffsetGPR = argumentGPR2; 1857 move(TrustedImm32(bytecodeOffset), 
bytecodeOffsetGPR); 1858 1859 emitNakedNearCall(vm.getCTIStub(slow_op_get_from_scopeGenerator).retaggedCode<NoPtrTag>()); 1860 1861 emitValueProfilingSite(metadata, returnValueGPR); 1862 emitPutVirtualRegister(dst, returnValueGPR); 1863 #endif // ENABLE(EXTRA_CTI_THUNKS) 1864 } 1865 1866 #if ENABLE(EXTRA_CTI_THUNKS) 2113 constexpr GPRReg metadataGPR = regT7; 2114 constexpr GPRReg scopeGPR = regT6; 2115 constexpr GPRReg bytecodeOffsetGPR = regT5; 2116 2117 if (resolveType == GlobalVar) { 2118 uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&metadata.m_operand); 2119 emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0); 2120 } else { 2121 ptrdiff_t metadataOffset = m_codeBlock->offsetInMetadataTable(&metadata); 2122 2123 #define GET_FROM_SCOPE_GENERATOR(resolveType) op_get_from_scope_##resolveType##Generator, 2124 static const ThunkGenerator generators[] = { 2125 FOR_EACH_RESOLVE_TYPE(GET_FROM_SCOPE_GENERATOR) 2126 }; 2127 #undef GET_FROM_SCOPE_GENERATOR 2128 2129 emitGetVirtualRegister(scope, scopeGPR); 2130 move(TrustedImmPtr(metadataOffset), metadataGPR); 2131 move(TrustedImm32(bytecodeOffset), bytecodeOffsetGPR); 2132 emitNakedNearCall(vm.getCTIStub(generators[resolveType]).retaggedCode<NoPtrTag>()); 2133 } 2134 emitPutVirtualRegister(dst); 2135 } 2136 2137 MacroAssemblerCodeRef<JITThunkPtrTag> JIT::generateOpGetFromScopeThunk(ResolveType resolveType, const char* thunkName) 2138 { 2139 // The thunk generated by this function can only work with the LLInt / Baseline JIT because 2140 // it makes assumptions about the right globalObject being available from CallFrame::codeBlock(). 2141 // DFG/FTL may inline functions belonging to other globalObjects, which may not match 2142 // CallFrame::codeBlock(). 
2143 using Metadata = OpGetFromScope::Metadata; 2144 constexpr GPRReg metadataGPR = regT7; 2145 constexpr GPRReg scopeGPR = regT6; 2146 RELEASE_ASSERT(thunkIsUsedForOpGetFromScope(resolveType)); 2147 2148 tagReturnAddress(); 2149 2150 loadPtr(addressFor(CallFrameSlot::codeBlock), regT3); 2151 loadPtr(Address(regT3, CodeBlock::offsetOfMetadataTable()), regT3); 2152 addPtr(regT3, metadataGPR); 2153 2154 JumpList slowCase; 2155 2156 auto emitLoadWithStructureCheck = [&] (GPRReg scopeGPR, int32_t metadataStructureOffset) { 2157 loadPtr(Address(metadataGPR, metadataStructureOffset), regT1); 2158 move(scopeGPR, regT0); 2159 slowCase.append(branchTestPtr(Zero, regT1)); 2160 load32(Address(regT1, Structure::structureIDOffset()), regT1); 2161 slowCase.append(branch32(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT1)); 2162 }; 2163 2164 auto emitVarInjectionCheck = [&] (bool needsVarInjectionChecks) { 2165 if (!needsVarInjectionChecks) 2166 return; 2167 loadPtr(addressFor(CallFrameSlot::codeBlock), regT3); 2168 loadPtr(Address(regT3, CodeBlock::offsetOfGlobalObject()), regT3); 2169 loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalObject, m_varInjectionWatchpoint)), regT3); 2170 slowCase.append(branch8(Equal, Address(regT3, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated))); 2171 }; 2172 2173 auto emitGetVarFromPointer = [&] (int32_t operand, GPRReg reg) { 2174 loadPtr(Address(metadataGPR, operand), reg); 2175 loadPtr(reg, reg); 2176 }; 2177 2178 auto emitGetVarFromIndirectPointer = [&] (int32_t operand, GPRReg reg) { 2179 loadPtr(Address(metadataGPR, operand), reg); 2180 loadPtr(reg, reg); 2181 }; 2182 2183 auto emitGetClosureVar = [&] (GPRReg scopeGPR, GPRReg operandGPR) { 2184 static_assert(1 << 3 == sizeof(Register)); 2185 lshift64(TrustedImm32(3), operandGPR); 2186 addPtr(scopeGPR, operandGPR); 2187 loadPtr(Address(operandGPR, JSLexicalEnvironment::offsetOfVariables()), regT0); 2188 }; 2189 2190 auto emitCode = [&] (ResolveType resolveType, bool 
indirectLoadForOperand) { 2191 switch (resolveType) { 2192 case GlobalProperty: 2193 case GlobalPropertyWithVarInjectionChecks: { 2194 emitLoadWithStructureCheck(scopeGPR, OBJECT_OFFSETOF(Metadata, m_structure)); // Structure check covers var injection since we don't cache structures for anything but the GlobalObject. Additionally, resolve_scope handles checking for the var injection. 2195 2196 constexpr GPRReg base = regT0; 2197 constexpr GPRReg result = regT0; 2198 constexpr GPRReg offset = regT1; 2199 constexpr GPRReg scratch = regT2; 2200 2201 jitAssert(scopedLambda<Jump(void)>([&] () -> Jump { 2202 loadPtr(addressFor(CallFrameSlot::codeBlock), regT3); 2203 loadPtr(Address(regT3, CodeBlock::offsetOfGlobalObject()), regT3); 2204 return branchPtr(Equal, base, regT3); 2205 })); 2206 2207 loadPtr(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_operand)), offset); 2208 if (ASSERT_ENABLED) { 2209 Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset)); 2210 abortWithReason(JITOffsetIsNotOutOfLine); 2211 isOutOfLine.link(this); 2212 } 2213 loadPtr(Address(base, JSObject::butterflyOffset()), scratch); 2214 neg32(offset); 2215 signExtend32ToPtr(offset, offset); 2216 load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result); 2217 break; 2218 } 2219 case GlobalVar: 2220 case GlobalVarWithVarInjectionChecks: 2221 case GlobalLexicalVar: 2222 case GlobalLexicalVarWithVarInjectionChecks: 2223 emitVarInjectionCheck(needsVarInjectionChecks(resolveType)); 2224 if (indirectLoadForOperand) 2225 emitGetVarFromIndirectPointer(OBJECT_OFFSETOF(Metadata, m_operand), regT0); 2226 else 2227 emitGetVarFromPointer(OBJECT_OFFSETOF(Metadata, m_operand), regT0); 2228 if (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks) // TDZ check. 
2229 slowCase.append(branchIfEmpty(regT0)); 2230 break; 2231 case ClosureVar: 2232 case ClosureVarWithVarInjectionChecks: 2233 emitVarInjectionCheck(needsVarInjectionChecks(resolveType)); 2234 loadPtr(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_operand)), regT3); 2235 emitGetClosureVar(scopeGPR, regT3); 2236 break; 2237 case Dynamic: 2238 slowCase.append(jump()); 2239 break; 2240 case LocalClosureVar: 2241 case ModuleVar: 2242 case UnresolvedProperty: 2243 case UnresolvedPropertyWithVarInjectionChecks: 2244 RELEASE_ASSERT_NOT_REACHED(); 2245 } 2246 }; 2247 2248 switch (resolveType) { 2249 case GlobalProperty: 2250 case GlobalPropertyWithVarInjectionChecks: { 2251 JumpList skipToEnd; 2252 load32(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_getPutInfo)), regT0); 2253 and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0 2254 2255 Jump isNotGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(resolveType)); 2256 emitCode(resolveType, false); 2257 skipToEnd.append(jump()); 2258 2259 isNotGlobalProperty.link(this); 2260 emitCode(needsVarInjectionChecks(resolveType) ? 
GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar, true); 2261 2262 skipToEnd.link(this); 2263 break; 2264 } 2265 case UnresolvedProperty: 2266 case UnresolvedPropertyWithVarInjectionChecks: { 2267 JumpList skipToEnd; 2268 load32(Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_getPutInfo)), regT0); 2269 and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0 2270 2271 Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty)); 2272 Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks)); 2273 isGlobalProperty.link(this); 2274 emitCode(GlobalProperty, false); 2275 skipToEnd.append(jump()); 2276 notGlobalPropertyWithVarInjections.link(this); 2277 2278 Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar)); 2279 emitCode(GlobalLexicalVar, true); 2280 skipToEnd.append(jump()); 2281 notGlobalLexicalVar.link(this); 2282 2283 Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks)); 2284 emitCode(GlobalLexicalVarWithVarInjectionChecks, true); 2285 skipToEnd.append(jump()); 2286 notGlobalLexicalVarWithVarInjections.link(this); 2287 2288 slowCase.append(jump()); 2289 2290 skipToEnd.link(this); 2291 break; 2292 } 2293 2294 default: 2295 emitCode(resolveType, false); 2296 break; 2297 } 2298 2299 static_assert(ValueProfile::numberOfBuckets == 1); 2300 store64(regT0, Address(metadataGPR, OBJECT_OFFSETOF(Metadata, m_profile))); 2301 2302 ret(); 2303 2304 LinkBuffer patchBuffer(*this, GLOBAL_THUNK_ID, LinkBuffer::Profile::Thunk); 2305 auto slowCaseHandler = vm().getCTIStub(slow_op_get_from_scopeGenerator); 2306 patchBuffer.link(slowCase, CodeLocationLabel(slowCaseHandler.retaggedCode<NoPtrTag>())); 2307 return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, thunkName); 2308 } 2309 2310 #define DEFINE_GET_FROM_SCOPE_GENERATOR(resolveType) \ 2311 
MacroAssemblerCodeRef<JITThunkPtrTag> JIT::op_get_from_scope_##resolveType##Generator(VM& vm) \ 2312 { \ 2313 if constexpr (!thunkIsUsedForOpGetFromScope(resolveType)) \ 2314 return { }; \ 2315 JIT jit(vm); \ 2316 return jit.generateOpGetFromScopeThunk(resolveType, "Baseline: op_get_from_scope_" #resolveType); \ 2317 } 2318 FOR_EACH_RESOLVE_TYPE(DEFINE_GET_FROM_SCOPE_GENERATOR) 2319 #undef DEFINE_GET_FROM_SCOPE_GENERATOR 2320 1867 2321 MacroAssemblerCodeRef<JITThunkPtrTag> JIT::slow_op_get_from_scopeGenerator(VM& vm) 1868 2322 { … … 1876 2330 jit.push(X86Registers::ebp); 1877 2331 #elif CPU(ARM64) 1878 jit.tagReturnAddress();1879 2332 jit.pushPair(framePointerRegister, linkRegister); 1880 2333 #endif 1881 2334 1882 constexpr GPRReg bytecodeOffsetGPR = argumentGPR2; 2335 using Metadata = OpGetFromScope::Metadata; 2336 constexpr GPRReg metadataGPR = regT7; 2337 constexpr GPRReg bytecodeOffsetGPR = regT5; 1883 2338 jit.store32(bytecodeOffsetGPR, tagFor(CallFrameSlot::argumentCountIncludingThis)); 1884 2339 … … 1892 2347 jit.addPtr(bytecodeOffsetGPR, instructionGPR); 1893 2348 2349 ASSERT(RegisterSet::calleeSaveRegisters().contains(GPRInfo::numberTagRegister)); 2350 jit.move(metadataGPR, GPRInfo::numberTagRegister); // Preserve metadata in a callee saved register. 
1894 2351 jit.setupArguments<decltype(operationGetFromScope)>(globalObjectGPR, instructionGPR); 1895 2352 jit.prepareCallOperation(vm); 1896 CCallHelpers::Call operation = jit.call(OperationPtrTag); 1897 CCallHelpers::Jump exceptionCheck = jit.emitNonPatchableExceptionCheck(vm); 2353 Call operation = jit.call(OperationPtrTag); 2354 Jump exceptionCheck = jit.emitNonPatchableExceptionCheck(vm); 2355 2356 jit.store64(regT0, Address(GPRInfo::numberTagRegister, OBJECT_OFFSETOF(Metadata, m_profile))); 2357 jit.move(TrustedImm64(JSValue::NumberTag), GPRInfo::numberTagRegister); 1898 2358 1899 2359 #if CPU(X86_64) … … 1904 2364 jit.ret(); 1905 2365 2366 exceptionCheck.link(&jit); 2367 jit.move(TrustedImm64(JSValue::NumberTag), GPRInfo::numberTagRegister); 2368 Jump jumpToHandler = jit.jump(); 2369 1906 2370 LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, LinkBuffer::Profile::ExtraCTIThunk); 1907 2371 patchBuffer.link(operation, FunctionPtr<OperationPtrTag>(operationGetFromScope)); 1908 2372 auto handler = vm.getCTIStub(popThunkStackPreservesAndHandleExceptionGenerator); 1909 patchBuffer.link( exceptionCheck, CodeLocationLabel(handler.retaggedCode<NoPtrTag>()));2373 patchBuffer.link(jumpToHandler, CodeLocationLabel(handler.retaggedCode<NoPtrTag>())); 1910 2374 return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Baseline: slow_op_get_from_scope"); 1911 2375 } -
trunk/Source/JavaScriptCore/jit/ThunkGenerators.cpp
r277974 r278029 92 92 LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, LinkBuffer::Profile::ExtraCTIThunk); 93 93 auto handler = vm.getCTIStub(handleExceptionGenerator); 94 RELEASE_ASSERT(handler);95 94 patchBuffer.link(continuation, CodeLocationLabel(handler.retaggedCode<NoPtrTag>())); 96 95 return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "popThunkStackPreservesAndHandleException"); -
trunk/Source/JavaScriptCore/runtime/GetPutInfo.h
r259676 r278029 1 1 /* 2 * Copyright (C) 2015-202 0Apple Inc. All Rights Reserved.2 * Copyright (C) 2015-2021 Apple Inc. All Rights Reserved. 3 3 * 4 4 * Redistribution and use in source and binary forms, with or without … … 40 40 }; 41 41 42 #define FOR_EACH_RESOLVE_TYPE(v) \ 43 v(GlobalProperty) \ 44 v(GlobalVar) \ 45 v(GlobalLexicalVar) \ 46 v(ClosureVar) \ 47 v(LocalClosureVar) \ 48 v(ModuleVar) \ 49 v(GlobalPropertyWithVarInjectionChecks) \ 50 v(GlobalVarWithVarInjectionChecks) \ 51 v(GlobalLexicalVarWithVarInjectionChecks) \ 52 v(ClosureVarWithVarInjectionChecks) \ 53 v(UnresolvedProperty) \ 54 v(UnresolvedPropertyWithVarInjectionChecks) \ 55 v(Dynamic) 56 42 57 enum ResolveType : unsigned { 43 58 // Lexical scope guaranteed a certain type of variable access. -
trunk/Source/JavaScriptCore/runtime/JSGlobalObject.h
r277665 r278029 884 884 885 885 static ptrdiff_t offsetOfVM() { return OBJECT_OFFSETOF(JSGlobalObject, m_vm); } 886 static ptrdiff_t offsetOfGlobalLexicalEnvironment() { return OBJECT_OFFSETOF(JSGlobalObject, m_globalLexicalEnvironment); } 887 static ptrdiff_t offsetOfGlobalLexicalBindingEpoch() { return OBJECT_OFFSETOF(JSGlobalObject, m_globalLexicalBindingEpoch); } 886 888 887 889 #if ENABLE(REMOTE_INSPECTOR)
Note: See TracChangeset
for help on using the changeset viewer.