Changeset 250775 in webkit
- Timestamp:
- Oct 7, 2019 9:47:30 AM (5 years ago)
- Location:
- trunk
- Files:
-
- 2 added
- 2 deleted
- 20 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/JSTests/ChangeLog
r250750 r250775 1 2019-10-07 Matt Lewis <jlewis3@apple.com> 2 3 Unreviewed, rolling out r250750. 4 5 Reverting change as this broke internal test over the weekend. 6 7 Reverted changeset: 8 9 "Allow OSR exit to the LLInt" 10 https://bugs.webkit.org/show_bug.cgi?id=197993 11 https://trac.webkit.org/changeset/250750 12 1 13 2019-10-04 Saam Barati <sbarati@apple.com> 2 14 -
trunk/Source/JavaScriptCore/ChangeLog
r250752 r250775 1 2019-10-07 Matt Lewis <jlewis3@apple.com> 2 3 Unreviewed, rolling out r250750. 4 5 Reverting change as this broke internal test over the weekend. 6 7 Reverted changeset: 8 9 "Allow OSR exit to the LLInt" 10 https://bugs.webkit.org/show_bug.cgi?id=197993 11 https://trac.webkit.org/changeset/250750 12 1 13 2019-10-04 Ross Kirsling <ross.kirsling@sony.com> 2 14 -
trunk/Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj
r250750 r250775 183 183 0F235BE217178E1C00690C7F /* FTLThunks.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F235BCC17178E1C00690C7F /* FTLThunks.h */; settings = {ATTRIBUTES = (Private, ); }; }; 184 184 0F235BEC17178E7300690C7F /* DFGOSRExitBase.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F235BE817178E7300690C7F /* DFGOSRExitBase.h */; }; 185 0F235BEE17178E7300690C7F /* DFGOSRExitPreparation.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F235BEA17178E7300690C7F /* DFGOSRExitPreparation.h */; }; 185 186 0F24E54117EA9F5900ABB217 /* AssemblyHelpers.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E53C17EA9F5900ABB217 /* AssemblyHelpers.h */; settings = {ATTRIBUTES = (Private, ); }; }; 186 187 0F24E54217EA9F5900ABB217 /* CCallHelpers.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E53D17EA9F5900ABB217 /* CCallHelpers.h */; settings = {ATTRIBUTES = (Private, ); }; }; … … 2296 2297 0F235BE717178E7300690C7F /* DFGOSRExitBase.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGOSRExitBase.cpp; path = dfg/DFGOSRExitBase.cpp; sourceTree = "<group>"; }; 2297 2298 0F235BE817178E7300690C7F /* DFGOSRExitBase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGOSRExitBase.h; path = dfg/DFGOSRExitBase.h; sourceTree = "<group>"; }; 2299 0F235BE917178E7300690C7F /* DFGOSRExitPreparation.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGOSRExitPreparation.cpp; path = dfg/DFGOSRExitPreparation.cpp; sourceTree = "<group>"; }; 2300 0F235BEA17178E7300690C7F /* DFGOSRExitPreparation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGOSRExitPreparation.h; path = dfg/DFGOSRExitPreparation.h; sourceTree = "<group>"; }; 2298 2301 0F24E53B17EA9F5900ABB217 /* AssemblyHelpers.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = 
AssemblyHelpers.cpp; sourceTree = "<group>"; }; 2299 2302 0F24E53C17EA9F5900ABB217 /* AssemblyHelpers.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AssemblyHelpers.h; sourceTree = "<group>"; }; … … 7870 7873 0FEFC9A71681A3B000567F53 /* DFGOSRExitJumpPlaceholder.cpp */, 7871 7874 0FEFC9A81681A3B000567F53 /* DFGOSRExitJumpPlaceholder.h */, 7875 0F235BE917178E7300690C7F /* DFGOSRExitPreparation.cpp */, 7876 0F235BEA17178E7300690C7F /* DFGOSRExitPreparation.h */, 7872 7877 0F6237951AE45CA700D402EA /* DFGPhantomInsertionPhase.cpp */, 7873 7878 0F6237961AE45CA700D402EA /* DFGPhantomInsertionPhase.h */, … … 9235 9240 0F392C8A1B46188400844728 /* DFGOSRExitFuzz.h in Headers */, 9236 9241 0FEFC9AB1681A3B600567F53 /* DFGOSRExitJumpPlaceholder.h in Headers */, 9242 0F235BEE17178E7300690C7F /* DFGOSRExitPreparation.h in Headers */, 9237 9243 0F6237981AE45CA700D402EA /* DFGPhantomInsertionPhase.h in Headers */, 9238 9244 0FFFC95C14EF90AF00C72532 /* DFGPhase.h in Headers */, -
trunk/Source/JavaScriptCore/Sources.txt
r250750 r250775 383 383 dfg/DFGOSRExitFuzz.cpp 384 384 dfg/DFGOSRExitJumpPlaceholder.cpp 385 dfg/DFGOSRExitPreparation.cpp 385 386 dfg/DFGObjectAllocationSinkingPhase.cpp 386 387 dfg/DFGObjectMaterializationData.cpp -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.h
r250750 r250775 893 893 } 894 894 895 MetadataTable* metadataTable() { return m_metadata.get(); }896 const void* instructionsRawPointer() { return m_instructionsRawPointer; }897 898 895 protected: 899 896 void finalizeLLIntInlineCaches(); -
trunk/Source/JavaScriptCore/bytecode/InlineCallFrame.h
r250750 r250775 241 241 inline CodeBlock* baselineCodeBlockForOriginAndBaselineCodeBlock(const CodeOrigin& codeOrigin, CodeBlock* baselineCodeBlock) 242 242 { 243 ASSERT( JITCode::isBaselineCode(baselineCodeBlock->jitType()));243 ASSERT(baselineCodeBlock->jitType() == JITType::BaselineJIT); 244 244 auto* inlineCallFrame = codeOrigin.inlineCallFrame(); 245 245 if (inlineCallFrame) -
trunk/Source/JavaScriptCore/dfg/DFGOSRExit.cpp
r250750 r250775 35 35 #include "DFGMayExit.h" 36 36 #include "DFGOSRExitCompilerCommon.h" 37 #include "DFGOSRExitPreparation.h" 37 38 #include "DFGOperations.h" 38 39 #include "DFGSpeculativeJIT.h" … … 372 373 // exit ramp code. 373 374 375 // Ensure we have baseline codeBlocks to OSR exit to. 376 prepareCodeOriginForOSRExit(exec, exit.m_codeOrigin); 377 374 378 CodeBlock* baselineCodeBlock = codeBlock->baselineAlternative(); 375 ASSERT( JITCode::isBaselineCode(baselineCodeBlock->jitType()));379 ASSERT(baselineCodeBlock->jitType() == JITType::BaselineJIT); 376 380 377 381 SpeculationRecovery* recovery = nullptr; … … 403 407 404 408 CodeBlock* codeBlockForExit = baselineCodeBlockForOriginAndBaselineCodeBlock(exit.m_codeOrigin, baselineCodeBlock); 405 bool exitToLLInt = Options::forceOSRExitToLLInt() || codeBlockForExit->jitType() == JITType::InterpreterThunk; 406 void* jumpTarget; 407 if (exitToLLInt) { 408 unsigned bytecodeOffset = exit.m_codeOrigin.bytecodeIndex(); 409 const Instruction& currentInstruction = *codeBlockForExit->instructions().at(bytecodeOffset).ptr(); 410 MacroAssemblerCodePtr<JSEntryPtrTag> destination = LLInt::getCodePtr<JSEntryPtrTag>(currentInstruction); 411 jumpTarget = destination.executableAddress(); 412 } else { 413 const JITCodeMap& codeMap = codeBlockForExit->jitCodeMap(); 414 CodeLocationLabel<JSEntryPtrTag> codeLocation = codeMap.find(exit.m_codeOrigin.bytecodeIndex()); 415 ASSERT(codeLocation); 416 jumpTarget = codeLocation.executableAddress(); 417 } 409 const JITCodeMap& codeMap = codeBlockForExit->jitCodeMap(); 410 CodeLocationLabel<JSEntryPtrTag> codeLocation = codeMap.find(exit.m_codeOrigin.bytecodeIndex()); 411 ASSERT(codeLocation); 412 413 void* jumpTarget = codeLocation.executableAddress(); 418 414 419 415 // Compute the value recoveries. 
… … 423 419 ptrdiff_t stackPointerOffset = -static_cast<ptrdiff_t>(codeBlock->jitCode()->dfgCommon()->requiredRegisterCountForExit) * sizeof(Register); 424 420 425 exit.exitState = adoptRef(new OSRExitState(exit, codeBlock, baselineCodeBlock, operands, WTFMove(undefinedOperandSpans), recovery, stackPointerOffset, activeThreshold, adjustedThreshold, jumpTarget, arrayProfile , exitToLLInt));421 exit.exitState = adoptRef(new OSRExitState(exit, codeBlock, baselineCodeBlock, operands, WTFMove(undefinedOperandSpans), recovery, stackPointerOffset, activeThreshold, adjustedThreshold, jumpTarget, arrayProfile)); 426 422 427 423 if (UNLIKELY(vm.m_perBytecodeProfiler && codeBlock->jitCode()->dfgCommon()->compilation)) { … … 451 447 OSRExitState& exitState = *exit.exitState.get(); 452 448 CodeBlock* baselineCodeBlock = exitState.baselineCodeBlock; 453 ASSERT( JITCode::isBaselineCode(baselineCodeBlock->jitType()));449 ASSERT(baselineCodeBlock->jitType() == JITType::BaselineJIT); 454 450 455 451 Operands<ValueRecovery>& operands = exitState.operands; … … 762 758 // in presence of inlined tail calls. 
763 759 // https://bugs.webkit.org/show_bug.cgi?id=147511 764 ASSERT( JITCode::isBaselineCode(outermostBaselineCodeBlock->jitType()));760 ASSERT(outermostBaselineCodeBlock->jitType() == JITType::BaselineJIT); 765 761 frame.setOperand<CodeBlock*>(CallFrameSlot::codeBlock, outermostBaselineCodeBlock); 766 762 … … 773 769 void* callerFrame = cpu.fp(); 774 770 775 bool callerIsLLInt = false;776 777 771 if (!trueCaller) { 778 772 ASSERT(inlineCallFrame->isTail()); … … 788 782 CodeBlock* baselineCodeBlockForCaller = baselineCodeBlockForOriginAndBaselineCodeBlock(*trueCaller, outermostBaselineCodeBlock); 789 783 unsigned callBytecodeIndex = trueCaller->bytecodeIndex(); 790 void* jumpTarget = callerReturnPC(baselineCodeBlockForCaller, callBytecodeIndex, trueCallerCallKind, callerIsLLInt); 784 MacroAssemblerCodePtr<JSInternalPtrTag> jumpTarget; 785 786 switch (trueCallerCallKind) { 787 case InlineCallFrame::Call: 788 case InlineCallFrame::Construct: 789 case InlineCallFrame::CallVarargs: 790 case InlineCallFrame::ConstructVarargs: 791 case InlineCallFrame::TailCall: 792 case InlineCallFrame::TailCallVarargs: { 793 CallLinkInfo* callLinkInfo = 794 baselineCodeBlockForCaller->getCallLinkInfoForBytecodeIndex(callBytecodeIndex); 795 RELEASE_ASSERT(callLinkInfo); 796 797 jumpTarget = callLinkInfo->callReturnLocation(); 798 break; 799 } 800 801 case InlineCallFrame::GetterCall: 802 case InlineCallFrame::SetterCall: { 803 StructureStubInfo* stubInfo = 804 baselineCodeBlockForCaller->findStubInfo(CodeOrigin(callBytecodeIndex)); 805 RELEASE_ASSERT(stubInfo); 806 807 jumpTarget = stubInfo->doneLocation(); 808 break; 809 } 810 811 default: 812 RELEASE_ASSERT_NOT_REACHED(); 813 } 791 814 792 815 if (trueCaller->inlineCallFrame()) 793 816 callerFrame = cpu.fp<uint8_t*>() + trueCaller->inlineCallFrame()->stackOffset * sizeof(EncodedJSValue); 794 817 818 void* targetAddress = jumpTarget.executableAddress(); 795 819 #if CPU(ARM64E) 796 820 void* newEntrySP = cpu.fp<uint8_t*>() + 
inlineCallFrame->returnPCOffset() + sizeof(void*); 797 jumpTarget = tagCodePtr(jumpTarget, bitwise_cast<PtrTag>(newEntrySP));798 #endif 799 frame.set<void*>(inlineCallFrame->returnPCOffset(), jumpTarget);821 targetAddress = retagCodePtr(targetAddress, JSInternalPtrTag, bitwise_cast<PtrTag>(newEntrySP)); 822 #endif 823 frame.set<void*>(inlineCallFrame->returnPCOffset(), targetAddress); 800 824 } 801 825 … … 806 830 // copy the prior contents of the tag registers already saved for the outer frame to this frame. 807 831 saveOrCopyCalleeSavesFor(context, baselineCodeBlock, VirtualRegister(inlineCallFrame->stackOffset), !trueCaller); 808 809 if (callerIsLLInt) {810 CodeBlock* baselineCodeBlockForCaller = baselineCodeBlockForOriginAndBaselineCodeBlock(*trueCaller, outermostBaselineCodeBlock);811 frame.set<const void*>(calleeSaveSlot(inlineCallFrame, baselineCodeBlock, LLInt::Registers::metadataTableGPR).offset, baselineCodeBlockForCaller->metadataTable());812 #if USE(JSVALUE64)813 frame.set<const void*>(calleeSaveSlot(inlineCallFrame, baselineCodeBlock, LLInt::Registers::pbGPR).offset, baselineCodeBlockForCaller->instructionsRawPointer());814 #endif815 }816 832 817 833 if (!inlineCallFrame->isVarargs()) … … 879 895 880 896 vm.topCallFrame = context.fp<ExecState*>(); 881 882 if (exitState->isJumpToLLInt) {883 CodeBlock* codeBlockForExit = baselineCodeBlockForOriginAndBaselineCodeBlock(exit.m_codeOrigin, baselineCodeBlock);884 unsigned bytecodeOffset = exit.m_codeOrigin.bytecodeIndex();885 const Instruction& currentInstruction = *codeBlockForExit->instructions().at(bytecodeOffset).ptr();886 887 context.gpr(LLInt::Registers::metadataTableGPR) = bitwise_cast<uintptr_t>(codeBlockForExit->metadataTable());888 #if USE(JSVALUE64)889 context.gpr(LLInt::Registers::pbGPR) = bitwise_cast<uintptr_t>(codeBlockForExit->instructionsRawPointer());890 context.gpr(LLInt::Registers::pcGPR) = static_cast<uintptr_t>(exit.m_codeOrigin.bytecodeIndex());891 #else892 
context.gpr(LLInt::Registers::pcGPR) = bitwise_cast<uintptr_t>(&currentInstruction);893 #endif894 895 if (exit.isExceptionHandler())896 vm.targetInterpreterPCForThrow = &currentInstruction;897 }898 899 897 context.pc() = untagCodePtr<JSEntryPtrTag>(jumpTarget); 900 898 } … … 1055 1053 EXCEPTION_ASSERT_UNUSED(scope, !!scope.exception() || !exit.isExceptionHandler()); 1056 1054 1055 prepareCodeOriginForOSRExit(exec, exit.m_codeOrigin); 1056 1057 1057 // Compute the value recoveries. 1058 1058 Operands<ValueRecovery> operands; -
trunk/Source/JavaScriptCore/dfg/DFGOSRExit.h
r250750 r250775 107 107 108 108 struct OSRExitState : RefCounted<OSRExitState> { 109 OSRExitState(OSRExitBase& exit, CodeBlock* codeBlock, CodeBlock* baselineCodeBlock, Operands<ValueRecovery>& operands, Vector<UndefinedOperandSpan>&& undefinedOperandSpans, SpeculationRecovery* recovery, ptrdiff_t stackPointerOffset, int32_t activeThreshold, double memoryUsageAdjustedThreshold, void* jumpTarget, ArrayProfile* arrayProfile , bool isJumpToLLInt)109 OSRExitState(OSRExitBase& exit, CodeBlock* codeBlock, CodeBlock* baselineCodeBlock, Operands<ValueRecovery>& operands, Vector<UndefinedOperandSpan>&& undefinedOperandSpans, SpeculationRecovery* recovery, ptrdiff_t stackPointerOffset, int32_t activeThreshold, double memoryUsageAdjustedThreshold, void* jumpTarget, ArrayProfile* arrayProfile) 110 110 : exit(exit) 111 111 , codeBlock(codeBlock) … … 119 119 , jumpTarget(jumpTarget) 120 120 , arrayProfile(arrayProfile) 121 , isJumpToLLInt(isJumpToLLInt)122 121 { } 123 122 … … 133 132 void* jumpTarget; 134 133 ArrayProfile* arrayProfile; 135 bool isJumpToLLInt;136 134 137 135 ExtraInitializationLevel extraInitializationLevel; -
trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp
r250750 r250775 34 34 #include "JSCJSValueInlines.h" 35 35 #include "JSCInlines.h" 36 #include "LLIntData.h"37 36 #include "StructureStubInfo.h" 38 37 39 38 namespace JSC { namespace DFG { 40 41 // These are the LLInt OSR exit return points.42 extern "C" void op_call_return_location_narrow();43 extern "C" void op_call_return_location_wide_16();44 extern "C" void op_call_return_location_wide_32();45 46 extern "C" void op_construct_return_location_narrow();47 extern "C" void op_construct_return_location_wide_16();48 extern "C" void op_construct_return_location_wide_32();49 50 extern "C" void op_call_varargs_slow_return_location_narrow();51 extern "C" void op_call_varargs_slow_return_location_wide_16();52 extern "C" void op_call_varargs_slow_return_location_wide_32();53 54 extern "C" void op_construct_varargs_slow_return_location_narrow();55 extern "C" void op_construct_varargs_slow_return_location_wide_16();56 extern "C" void op_construct_varargs_slow_return_location_wide_32();57 58 extern "C" void op_get_by_id_return_location_narrow();59 extern "C" void op_get_by_id_return_location_wide_16();60 extern "C" void op_get_by_id_return_location_wide_32();61 62 extern "C" void op_get_by_val_return_location_narrow();63 extern "C" void op_get_by_val_return_location_wide_16();64 extern "C" void op_get_by_val_return_location_wide_32();65 66 extern "C" void op_put_by_id_return_location_narrow();67 extern "C" void op_put_by_id_return_location_wide_16();68 extern "C" void op_put_by_id_return_location_wide_32();69 70 extern "C" void op_put_by_val_return_location_narrow();71 extern "C" void op_put_by_val_return_location_wide_16();72 extern "C" void op_put_by_val_return_location_wide_32();73 39 74 40 void handleExitCounts(CCallHelpers& jit, const OSRExitBase& exit) … … 171 137 } 172 138 173 void* callerReturnPC(CodeBlock* baselineCodeBlockForCaller, unsigned callBytecodeIndex, InlineCallFrame::Kind trueCallerCallKind, bool& callerIsLLInt)174 {175 callerIsLLInt = 
Options::forceOSRExitToLLInt() || baselineCodeBlockForCaller->jitType() == JITType::InterpreterThunk;176 177 void* jumpTarget;178 179 if (callerIsLLInt) {180 const Instruction& callInstruction = *baselineCodeBlockForCaller->instructions().at(callBytecodeIndex).ptr();181 182 #define LLINT_RETURN_LOCATION(name) FunctionPtr<NoPtrTag>(callInstruction.isWide16() ? name##_return_location_wide_16 : (callInstruction.isWide32() ? name##_return_location_wide_32 : name##_return_location_narrow)).executableAddress()183 184 switch (trueCallerCallKind) {185 case InlineCallFrame::Call:186 jumpTarget = LLINT_RETURN_LOCATION(op_call);187 break;188 case InlineCallFrame::Construct:189 jumpTarget = LLINT_RETURN_LOCATION(op_construct);190 break;191 case InlineCallFrame::CallVarargs:192 jumpTarget = LLINT_RETURN_LOCATION(op_call_varargs_slow);193 break;194 case InlineCallFrame::ConstructVarargs:195 jumpTarget = LLINT_RETURN_LOCATION(op_construct_varargs_slow);196 break;197 case InlineCallFrame::GetterCall: {198 if (callInstruction.opcodeID() == op_get_by_id)199 jumpTarget = LLINT_RETURN_LOCATION(op_get_by_id);200 else if (callInstruction.opcodeID() == op_get_by_val)201 jumpTarget = LLINT_RETURN_LOCATION(op_get_by_val);202 else203 RELEASE_ASSERT_NOT_REACHED();204 break;205 }206 case InlineCallFrame::SetterCall: {207 if (callInstruction.opcodeID() == op_put_by_id)208 jumpTarget = LLINT_RETURN_LOCATION(op_put_by_id);209 else if (callInstruction.opcodeID() == op_put_by_val)210 jumpTarget = LLINT_RETURN_LOCATION(op_put_by_val);211 else212 RELEASE_ASSERT_NOT_REACHED();213 break;214 }215 default:216 RELEASE_ASSERT_NOT_REACHED();217 }218 219 #undef LLINT_RETURN_LOCATION220 221 } else {222 switch (trueCallerCallKind) {223 case InlineCallFrame::Call:224 case InlineCallFrame::Construct:225 case InlineCallFrame::CallVarargs:226 case InlineCallFrame::ConstructVarargs: {227 CallLinkInfo* callLinkInfo =228 baselineCodeBlockForCaller->getCallLinkInfoForBytecodeIndex(callBytecodeIndex);229 
RELEASE_ASSERT(callLinkInfo);230 231 jumpTarget = callLinkInfo->callReturnLocation().untaggedExecutableAddress();232 break;233 }234 235 case InlineCallFrame::GetterCall:236 case InlineCallFrame::SetterCall: {237 StructureStubInfo* stubInfo =238 baselineCodeBlockForCaller->findStubInfo(CodeOrigin(callBytecodeIndex));239 RELEASE_ASSERT(stubInfo);240 241 jumpTarget = stubInfo->doneLocation().untaggedExecutableAddress();242 break;243 }244 245 default:246 RELEASE_ASSERT_NOT_REACHED();247 }248 }249 250 return jumpTarget;251 }252 253 CCallHelpers::Address calleeSaveSlot(InlineCallFrame* inlineCallFrame, CodeBlock* baselineCodeBlock, GPRReg calleeSave)254 {255 const RegisterAtOffsetList* calleeSaves = baselineCodeBlock->calleeSaveRegisters();256 for (unsigned i = 0; i < calleeSaves->size(); i++) {257 RegisterAtOffset entry = calleeSaves->at(i);258 if (entry.reg() != calleeSave)259 continue;260 return CCallHelpers::Address(CCallHelpers::framePointerRegister, static_cast<VirtualRegister>(inlineCallFrame->stackOffset).offsetInBytes() + entry.offset());261 }262 263 RELEASE_ASSERT_NOT_REACHED();264 return CCallHelpers::Address(CCallHelpers::framePointerRegister);265 }266 267 139 void reifyInlinedCallFrames(CCallHelpers& jit, const OSRExitBase& exit) 268 140 { … … 270 142 // in presence of inlined tail calls. 
271 143 // https://bugs.webkit.org/show_bug.cgi?id=147511 272 ASSERT( JITCode::isBaselineCode(jit.baselineCodeBlock()->jitType()));144 ASSERT(jit.baselineCodeBlock()->jitType() == JITType::BaselineJIT); 273 145 jit.storePtr(AssemblyHelpers::TrustedImmPtr(jit.baselineCodeBlock()), AssemblyHelpers::addressFor((VirtualRegister)CallFrameSlot::codeBlock)); 274 146 … … 280 152 CodeOrigin* trueCaller = inlineCallFrame->getCallerSkippingTailCalls(&trueCallerCallKind); 281 153 GPRReg callerFrameGPR = GPRInfo::callFrameRegister; 282 283 bool callerIsLLInt = false;284 154 285 155 if (!trueCaller) { … … 298 168 CodeBlock* baselineCodeBlockForCaller = jit.baselineCodeBlockFor(*trueCaller); 299 169 unsigned callBytecodeIndex = trueCaller->bytecodeIndex(); 300 void* jumpTarget = callerReturnPC(baselineCodeBlockForCaller, callBytecodeIndex, trueCallerCallKind, callerIsLLInt); 170 void* jumpTarget = nullptr; 171 172 switch (trueCallerCallKind) { 173 case InlineCallFrame::Call: 174 case InlineCallFrame::Construct: 175 case InlineCallFrame::CallVarargs: 176 case InlineCallFrame::ConstructVarargs: 177 case InlineCallFrame::TailCall: 178 case InlineCallFrame::TailCallVarargs: { 179 CallLinkInfo* callLinkInfo = 180 baselineCodeBlockForCaller->getCallLinkInfoForBytecodeIndex(callBytecodeIndex); 181 RELEASE_ASSERT(callLinkInfo); 182 183 jumpTarget = callLinkInfo->callReturnLocation().untaggedExecutableAddress(); 184 break; 185 } 186 187 case InlineCallFrame::GetterCall: 188 case InlineCallFrame::SetterCall: { 189 StructureStubInfo* stubInfo = 190 baselineCodeBlockForCaller->findStubInfo(CodeOrigin(callBytecodeIndex)); 191 RELEASE_ASSERT(stubInfo); 192 193 jumpTarget = stubInfo->doneLocation().untaggedExecutableAddress(); 194 break; 195 } 196 197 default: 198 RELEASE_ASSERT_NOT_REACHED(); 199 } 301 200 302 201 if (trueCaller->inlineCallFrame()) { … … 328 227 trueCaller ? 
AssemblyHelpers::UseExistingTagRegisterContents : AssemblyHelpers::CopyBaselineCalleeSavedRegistersFromBaseFrame, 329 228 GPRInfo::regT2); 330 331 if (callerIsLLInt) {332 CodeBlock* baselineCodeBlockForCaller = jit.baselineCodeBlockFor(*trueCaller);333 jit.storePtr(CCallHelpers::TrustedImmPtr(baselineCodeBlockForCaller->metadataTable()), calleeSaveSlot(inlineCallFrame, baselineCodeBlock, LLInt::Registers::metadataTableGPR));334 #if USE(JSVALUE64)335 jit.storePtr(CCallHelpers::TrustedImmPtr(baselineCodeBlockForCaller->instructionsRawPointer()), calleeSaveSlot(inlineCallFrame, baselineCodeBlock, LLInt::Registers::pbGPR));336 #endif337 }338 229 339 230 if (!inlineCallFrame->isVarargs()) … … 411 302 CodeBlock* codeBlockForExit = jit.baselineCodeBlockFor(exit.m_codeOrigin); 412 303 ASSERT(codeBlockForExit == codeBlockForExit->baselineVersion()); 413 ASSERT(JITCode::isBaselineCode(codeBlockForExit->jitType())); 414 415 void* jumpTarget; 416 bool exitToLLInt = Options::forceOSRExitToLLInt() || codeBlockForExit->jitType() == JITType::InterpreterThunk; 417 if (exitToLLInt) { 418 unsigned bytecodeOffset = exit.m_codeOrigin.bytecodeIndex(); 419 const Instruction& currentInstruction = *codeBlockForExit->instructions().at(bytecodeOffset).ptr(); 420 MacroAssemblerCodePtr<JSEntryPtrTag> destination = LLInt::getCodePtr<JSEntryPtrTag>(currentInstruction); 421 422 if (exit.isExceptionHandler()) { 423 jit.move(CCallHelpers::TrustedImmPtr(&currentInstruction), GPRInfo::regT2); 424 jit.storePtr(GPRInfo::regT2, &vm.targetInterpreterPCForThrow); 425 } 426 427 jit.move(CCallHelpers::TrustedImmPtr(codeBlockForExit->metadataTable()), LLInt::Registers::metadataTableGPR); 428 #if USE(JSVALUE64) 429 jit.move(CCallHelpers::TrustedImmPtr(codeBlockForExit->instructionsRawPointer()), LLInt::Registers::pbGPR); 430 jit.move(CCallHelpers::TrustedImm32(bytecodeOffset), LLInt::Registers::pcGPR); 431 #else 432 jit.move(CCallHelpers::TrustedImmPtr(&currentInstruction), LLInt::Registers::pcGPR); 433 #endif 434 
jumpTarget = destination.retagged<OSRExitPtrTag>().executableAddress(); 435 } else { 436 CodeLocationLabel<JSEntryPtrTag> codeLocation = codeBlockForExit->jitCodeMap().find(exit.m_codeOrigin.bytecodeIndex()); 437 ASSERT(codeLocation); 438 439 jumpTarget = codeLocation.retagged<OSRExitPtrTag>().executableAddress(); 440 } 441 304 ASSERT(codeBlockForExit->jitType() == JITType::BaselineJIT); 305 CodeLocationLabel<JSEntryPtrTag> codeLocation = codeBlockForExit->jitCodeMap().find(exit.m_codeOrigin.bytecodeIndex()); 306 ASSERT(codeLocation); 307 308 void* jumpTarget = codeLocation.retagged<OSRExitPtrTag>().executableAddress(); 442 309 jit.addPtr(AssemblyHelpers::TrustedImm32(JIT::stackPointerOffsetFor(codeBlockForExit) * sizeof(Register)), GPRInfo::callFrameRegister, AssemblyHelpers::stackPointerRegister); 443 310 if (exit.isExceptionHandler()) { -
trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.h
r250750 r250775 40 40 void reifyInlinedCallFrames(CCallHelpers&, const OSRExitBase&); 41 41 void adjustAndJumpToTarget(VM&, CCallHelpers&, const OSRExitBase&); 42 void* callerReturnPC(CodeBlock* baselineCodeBlockForCaller, unsigned callBytecodeOffset, InlineCallFrame::Kind callerKind, bool& callerIsLLInt);43 CCallHelpers::Address calleeSaveSlot(InlineCallFrame*, CodeBlock* baselineCodeBlock, GPRReg calleeSave);44 42 45 43 template <typename JITCodeType> -
trunk/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp
r250750 r250775 31 31 #include "BytecodeStructs.h" 32 32 #include "DFGOSRExitCompilerCommon.h" 33 #include "DFGOSRExitPreparation.h" 33 34 #include "FTLExitArgumentForOperand.h" 34 35 #include "FTLJITCode.h" … … 544 545 } 545 546 547 prepareCodeOriginForOSRExit(exec, exit.m_codeOrigin); 548 546 549 compileStub(exitID, jitCode, exit, &vm, codeBlock); 547 550 -
trunk/Source/JavaScriptCore/llint/LLIntData.h
r250750 r250775 26 26 #pragma once 27 27 28 #include "GPRInfo.h"29 #include "Instruction.h"30 28 #include "JSCJSValue.h" 31 29 #include "MacroAssemblerCodeRef.h" … … 35 33 36 34 class VM; 35 struct Instruction; 37 36 38 37 #if ENABLE(C_LOOP) … … 147 146 148 147 template<PtrTag tag> 149 ALWAYS_INLINE MacroAssemblerCodePtr<tag> getCodePtr(const Instruction& instruction)150 {151 if (instruction.isWide16())152 return getWide16CodePtr<tag>(instruction.opcodeID());153 if (instruction.isWide32())154 return getWide32CodePtr<tag>(instruction.opcodeID());155 return getCodePtr<tag>(instruction.opcodeID());156 }157 158 template<PtrTag tag>159 148 ALWAYS_INLINE MacroAssemblerCodeRef<tag> getCodeRef(OpcodeID opcodeID) 160 149 { … … 196 185 } 197 186 198 #if ENABLE(JIT)199 struct Registers {200 static const GPRReg pcGPR = GPRInfo::regT4;201 202 #if CPU(X86_64) && !OS(WINDOWS)203 static const GPRReg metadataTableGPR = GPRInfo::regCS1;204 static const GPRReg pbGPR = GPRInfo::regCS2;205 #elif CPU(X86_64) && OS(WINDOWS)206 static const GPRReg metadataTableGPR = GPRInfo::regCS3;207 static const GPRReg pbGPR = GPRInfo::regCS4;208 #elif CPU(ARM64)209 static const GPRReg metadataTableGPR = GPRInfo::regCS6;210 static const GPRReg pbGPR = GPRInfo::regCS7;211 #elif CPU(MIPS) || CPU(ARM_THUMB2)212 static const GPRReg metadataTableGPR = GPRInfo::regCS0;213 #endif214 };215 #endif216 217 187 } } // namespace JSC::LLInt -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
r250750 r250775 930 930 end 931 931 932 macro defineOSRExitReturnLabel(opcodeName, size) 933 macro defineNarrow() 934 global _%opcodeName%_return_location_narrow 935 _%opcodeName%_return_location_narrow: 936 end 937 938 macro defineWide16() 939 global _%opcodeName%_return_location_wide_16 940 _%opcodeName%_return_location_wide_16: 941 end 942 943 macro defineWide32() 944 global _%opcodeName%_return_location_wide_32 945 _%opcodeName%_return_location_wide_32: 946 end 947 948 size(defineNarrow, defineWide16, defineWide32, macro (f) f() end) 949 end 950 951 macro callTargetFunction(opcodeName, size, opcodeStruct, dispatch, callee, callPtrTag) 932 macro callTargetFunction(size, opcodeStruct, dispatch, callee, callPtrTag) 952 933 if C_LOOP or C_LOOP_WIN 953 934 cloopCallJSFunction callee … … 955 936 call callee, callPtrTag 956 937 end 957 958 defineOSRExitReturnLabel(opcodeName, size)959 938 restoreStackPointerAfterCall() 960 939 dispatchAfterCall(size, opcodeStruct, dispatch) … … 1026 1005 end 1027 1006 1028 macro slowPathForCall( opcodeName,size, opcodeStruct, dispatch, slowPath, prepareCall)1007 macro slowPathForCall(size, opcodeStruct, dispatch, slowPath, prepareCall) 1029 1008 callCallSlowPath( 1030 1009 slowPath, … … 1035 1014 prepareCall(callee, t2, t3, t4, SlowPathPtrTag) 1036 1015 .dontUpdateSP: 1037 callTargetFunction( %opcodeName%_slow,size, opcodeStruct, dispatch, callee, SlowPathPtrTag)1016 callTargetFunction(size, opcodeStruct, dispatch, callee, SlowPathPtrTag) 1038 1017 end) 1039 end1040 1041 macro getterSetterOSRExitReturnPoint(opName, size)1042 crash() # We don't reach this in straight line code. 
We only reach it via returning to the code below when reconstructing stack frames during OSR exit.1043 1044 defineOSRExitReturnLabel(opName, size)1045 1046 restoreStackPointerAfterCall()1047 loadi ArgumentCount + TagOffset[cfr], PC1048 1018 end 1049 1019 … … 1772 1742 1773 1743 1774 macro doCallVarargs( opcodeName,size, opcodeStruct, dispatch, frameSlowPath, slowPath, prepareCall)1744 macro doCallVarargs(size, opcodeStruct, dispatch, frameSlowPath, slowPath, prepareCall) 1775 1745 callSlowPath(frameSlowPath) 1776 1746 branchIfException(_llint_throw_from_slow_path_trampoline) … … 1787 1757 end 1788 1758 end 1789 slowPathForCall( opcodeName,size, opcodeStruct, dispatch, slowPath, prepareCall)1759 slowPathForCall(size, opcodeStruct, dispatch, slowPath, prepareCall) 1790 1760 end 1791 1761 1792 1762 1793 1763 llintOp(op_call_varargs, OpCallVarargs, macro (size, get, dispatch) 1794 doCallVarargs( op_call_varargs,size, OpCallVarargs, dispatch, _llint_slow_path_size_frame_for_varargs, _llint_slow_path_call_varargs, prepareForRegularCall)1764 doCallVarargs(size, OpCallVarargs, dispatch, _llint_slow_path_size_frame_for_varargs, _llint_slow_path_call_varargs, prepareForRegularCall) 1795 1765 end) 1796 1766 … … 1799 1769 # We lie and perform the tail call instead of preparing it since we can't 1800 1770 # prepare the frame for a call opcode 1801 doCallVarargs( op_tail_call_varargs,size, OpTailCallVarargs, dispatch, _llint_slow_path_size_frame_for_varargs, _llint_slow_path_tail_call_varargs, prepareForTailCall)1771 doCallVarargs(size, OpTailCallVarargs, dispatch, _llint_slow_path_size_frame_for_varargs, _llint_slow_path_tail_call_varargs, prepareForTailCall) 1802 1772 end) 1803 1773 … … 1807 1777 # We lie and perform the tail call instead of preparing it since we can't 1808 1778 # prepare the frame for a call opcode 1809 doCallVarargs( op_tail_call_forward_arguments,size, OpTailCallForwardArguments, dispatch, _llint_slow_path_size_frame_for_forward_arguments, 
_llint_slow_path_tail_call_forward_arguments, prepareForTailCall)1779 doCallVarargs(size, OpTailCallForwardArguments, dispatch, _llint_slow_path_size_frame_for_forward_arguments, _llint_slow_path_tail_call_forward_arguments, prepareForTailCall) 1810 1780 end) 1811 1781 1812 1782 1813 1783 llintOp(op_construct_varargs, OpConstructVarargs, macro (size, get, dispatch) 1814 doCallVarargs( op_construct_varargs,size, OpConstructVarargs, dispatch, _llint_slow_path_size_frame_for_varargs, _llint_slow_path_construct_varargs, prepareForRegularCall)1784 doCallVarargs(size, OpConstructVarargs, dispatch, _llint_slow_path_size_frame_for_varargs, _llint_slow_path_construct_varargs, prepareForRegularCall) 1815 1785 end) 1816 1786 … … 1851 1821 _llint_op_call_eval: 1852 1822 slowPathForCall( 1853 op_call_eval_narrow,1854 1823 narrow, 1855 1824 OpCallEval, … … 1860 1829 _llint_op_call_eval_wide16: 1861 1830 slowPathForCall( 1862 op_call_eval_wide16,1863 1831 wide16, 1864 1832 OpCallEval, … … 1869 1837 _llint_op_call_eval_wide32: 1870 1838 slowPathForCall( 1871 op_call_eval_wide32,1872 1839 wide32, 1873 1840 OpCallEval, -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm
r250750 r250775 1399 1399 callSlowPath(_llint_slow_path_get_by_id) 1400 1400 dispatch() 1401 1402 # osr return point1403 getterSetterOSRExitReturnPoint(op_get_by_id, size)1404 metadata(t2, t3)1405 valueProfile(OpGetById, t2, r1, r0)1406 return(r1, r0)1407 1408 1401 end) 1409 1402 … … 1468 1461 callSlowPath(_llint_slow_path_put_by_id) 1469 1462 dispatch() 1470 1471 # osr return point1472 getterSetterOSRExitReturnPoint(op_put_by_id, size)1473 dispatch()1474 1475 1463 end) 1476 1464 … … 1524 1512 callSlowPath(_llint_slow_path_get_by_val) 1525 1513 dispatch() 1526 1527 # osr return point 1528 getterSetterOSRExitReturnPoint(op_get_by_val, size) 1529 metadata(t2, t3) 1530 valueProfile(OpGetByVal, t2, r1, r0) 1531 return(r1, r0) 1532 1533 end) 1534 1535 1536 macro putByValOp(opcodeName, opcodeStruct, osrExitPoint) 1514 end) 1515 1516 1517 macro putByValOp(opcodeName, opcodeStruct) 1537 1518 llintOpWithMetadata(op_%opcodeName%, opcodeStruct, macro (size, get, dispatch, metadata, return) 1538 1519 macro contiguousPutByVal(storeCallback) … … 1622 1603 callSlowPath(_llint_slow_path_%opcodeName%) 1623 1604 dispatch() 1624 1625 .osrExitPoint:1626 osrExitPoint(size, dispatch)1627 1605 end) 1628 1606 end 1629 1607 1630 1608 1631 putByValOp(put_by_val, OpPutByVal, macro (size, dispatch) 1632 # osr return point 1633 getterSetterOSRExitReturnPoint(op_put_by_val, size) 1634 dispatch() 1635 end) 1636 1637 putByValOp(put_by_val_direct, OpPutByValDirect, macro (a, b) end) 1609 putByValOp(put_by_val, OpPutByVal) 1610 1611 putByValOp(put_by_val_direct, OpPutByValDirect) 1638 1612 1639 1613 … … 1902 1876 move t3, sp 1903 1877 prepareCall(%opcodeStruct%::Metadata::m_callLinkInfo.m_machineCodeTarget[t5], t2, t3, t4, JSEntryPtrTag) 1904 callTargetFunction( opcodeName,size, opcodeStruct, dispatch, %opcodeStruct%::Metadata::m_callLinkInfo.m_machineCodeTarget[t5], JSEntryPtrTag)1878 callTargetFunction(size, opcodeStruct, dispatch, %opcodeStruct%::Metadata::m_callLinkInfo.m_machineCodeTarget[t5], 
JSEntryPtrTag) 1905 1879 1906 1880 .opCallSlow: 1907 slowPathForCall( opcodeName,size, opcodeStruct, dispatch, slowPath, prepareCall)1881 slowPathForCall(size, opcodeStruct, dispatch, slowPath, prepareCall) 1908 1882 end) 1909 1883 end -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
r250750 r250775 1326 1326 end) 1327 1327 1328 1328 1329 llintOpWithMetadata(op_get_by_id, OpGetById, macro (size, get, dispatch, metadata, return) 1329 1330 metadata(t2, t1) … … 1376 1377 callSlowPath(_llint_slow_path_get_by_id) 1377 1378 dispatch() 1378 1379 # osr return point1380 getterSetterOSRExitReturnPoint(op_get_by_id, size)1381 metadata(t2, t3)1382 valueProfile(OpGetById, t2, r0)1383 return(r0)1384 1385 1379 end) 1386 1380 … … 1455 1449 callSlowPath(_llint_slow_path_put_by_id) 1456 1450 dispatch() 1457 1458 # osr return point1459 getterSetterOSRExitReturnPoint(op_put_by_id, size)1460 dispatch()1461 1462 1451 end) 1463 1452 … … 1631 1620 callSlowPath(_llint_slow_path_get_by_val) 1632 1621 dispatch() 1633 1634 # osr return point 1635 getterSetterOSRExitReturnPoint(op_get_by_val, size) 1636 metadata(t5, t2) 1637 valueProfile(OpGetByVal, t5, r0) 1638 return(r0) 1639 1640 end) 1641 1642 1643 macro putByValOp(opcodeName, opcodeStruct, osrExitPoint) 1622 end) 1623 1624 1625 macro putByValOp(opcodeName, opcodeStruct) 1644 1626 llintOpWithMetadata(op_%opcodeName%, opcodeStruct, macro (size, get, dispatch, metadata, return) 1645 1627 macro contiguousPutByVal(storeCallback) … … 1729 1711 callSlowPath(_llint_slow_path_%opcodeName%) 1730 1712 dispatch() 1731 1732 osrExitPoint(size, dispatch)1733 1734 1713 end) 1735 1714 end 1736 1715 1737 putByValOp(put_by_val, OpPutByVal, macro (size, dispatch) 1738 # osr return point 1739 getterSetterOSRExitReturnPoint(op_put_by_val, size) 1740 dispatch() 1741 end) 1742 1743 putByValOp(put_by_val_direct, OpPutByValDirect, macro (a, b) end) 1716 putByValOp(put_by_val, OpPutByVal) 1717 1718 putByValOp(put_by_val_direct, OpPutByValDirect) 1744 1719 1745 1720 … … 2030 2005 move t3, sp 2031 2006 prepareCall(%opcodeStruct%::Metadata::m_callLinkInfo.m_machineCodeTarget[t5], t2, t3, t4, JSEntryPtrTag) 2032 callTargetFunction( opcodeName,size, opcodeStruct, dispatch, %opcodeStruct%::Metadata::m_callLinkInfo.m_machineCodeTarget[t5], 
JSEntryPtrTag)2007 callTargetFunction(size, opcodeStruct, dispatch, %opcodeStruct%::Metadata::m_callLinkInfo.m_machineCodeTarget[t5], JSEntryPtrTag) 2033 2008 2034 2009 .opCallSlow: 2035 slowPathForCall( opcodeName,size, opcodeStruct, dispatch, slowPath, prepareCall)2010 slowPathForCall(size, opcodeStruct, dispatch, slowPath, prepareCall) 2036 2011 end) 2037 2012 end -
trunk/Source/JavaScriptCore/offlineasm/asm.rb
r250750 r250775 215 215 def putsLabel(labelName, isGlobal) 216 216 raise unless @state == :asm 217 unless isGlobal 218 @deferredNextLabelActions.each { 219 | action | 220 action.call() 221 } 222 end 217 @deferredNextLabelActions.each { 218 | action | 219 action.call() 220 } 223 221 @deferredNextLabelActions = [] 224 222 @numGlobalLabels += 1 … … 404 402 lowLevelAST.validate 405 403 emitCodeInConfiguration(concreteSettings, lowLevelAST, backend) { 406 $currentSettings = concreteSettings404 $currentSettings = concreteSettings 407 405 $asm.inAsm { 408 406 lowLevelAST.lower(backend) -
trunk/Source/JavaScriptCore/offlineasm/transform.rb
r250750 r250775 260 260 end 261 261 } 262 result = Label.forName(codeOrigin, name, @definedInFile) 263 result.setGlobal() if @global 264 result 262 Label.forName(codeOrigin, name, @definedInFile) 265 263 else 266 264 self … … 275 273 mapping[var].name 276 274 } 277 result = Label.forName(codeOrigin, name, @definedInFile) 278 result.setGlobal() if @global 279 result 275 Label.forName(codeOrigin, name, @definedInFile) 280 276 else 281 277 self -
trunk/Source/JavaScriptCore/runtime/OptionsList.h
r250750 r250775 465 465 v(Double, dumpJITMemoryFlushInterval, 10, Restricted, "Maximum time in between flushes of the JIT memory dump in seconds.") \ 466 466 v(Bool, useUnlinkedCodeBlockJettisoning, false, Normal, "If true, UnlinkedCodeBlock can be jettisoned.") \ 467 v(Bool, forceOSRExitToLLInt, false, Normal, "If true, we always exit to the LLInt. If false, we exit to whatever is most convenient.") \468 467 469 468 enum OptionEquivalence { -
trunk/Tools/ChangeLog
r250774 r250775 1 2019-10-07 Matt Lewis <jlewis3@apple.com> 2 3 Unreviewed, rolling out r250750. 4 5 Reverting change as this broke interal test over the weekend. 6 7 Reverted changeset: 8 9 "Allow OSR exit to the LLInt" 10 https://bugs.webkit.org/show_bug.cgi?id=197993 11 https://trac.webkit.org/changeset/250750 12 1 13 2019-10-07 youenn fablet <youenn@apple.com> 2 14 -
trunk/Tools/Scripts/run-jsc-stress-tests
r250750 r250775 496 496 FTL_OPTIONS = ["--useFTLJIT=true"] 497 497 PROBE_OSR_EXIT_OPTION = ["--useProbeOSRExit=true"] 498 FORCE_LLINT_EXIT_OPTIONS = ["--forceOSRExitToLLInt=true"]499 498 500 499 require_relative "webkitruby/jsc-stress-test-writer-#{$testWriter}" … … 710 709 711 710 def runFTLNoCJITB3O0(*optionalTestSpecificOptions) 712 run("ftl-no-cjit-b3o0", "--useArrayAllocationProfiling=false", "--forcePolyProto=true", *(FTL_OPTIONS + NO_CJIT_OPTIONS + B3O0_OPTIONS + FORCE_LLINT_EXIT_OPTIONS +optionalTestSpecificOptions))711 run("ftl-no-cjit-b3o0", "--useArrayAllocationProfiling=false", "--forcePolyProto=true", *(FTL_OPTIONS + NO_CJIT_OPTIONS + B3O0_OPTIONS + optionalTestSpecificOptions)) 713 712 end 714 713 … … 730 729 731 730 def runDFGEager(*optionalTestSpecificOptions) 732 run("dfg-eager", *(EAGER_OPTIONS + COLLECT_CONTINUOUSLY_OPTIONS + PROBE_OSR_EXIT_OPTION + FORCE_LLINT_EXIT_OPTIONS +optionalTestSpecificOptions))731 run("dfg-eager", *(EAGER_OPTIONS + COLLECT_CONTINUOUSLY_OPTIONS + PROBE_OSR_EXIT_OPTION + optionalTestSpecificOptions)) 733 732 end 734 733 … … 747 746 748 747 def runFTLEagerNoCJITValidate(*optionalTestSpecificOptions) 749 run("ftl-eager-no-cjit", "--validateGraph=true", "--airForceIRCAllocator=true", *(FTL_OPTIONS + NO_CJIT_OPTIONS + EAGER_OPTIONS + COLLECT_CONTINUOUSLY_OPTIONS + FORCE_LLINT_EXIT_OPTIONS +optionalTestSpecificOptions))748 run("ftl-eager-no-cjit", "--validateGraph=true", "--airForceIRCAllocator=true", *(FTL_OPTIONS + NO_CJIT_OPTIONS + EAGER_OPTIONS + COLLECT_CONTINUOUSLY_OPTIONS + optionalTestSpecificOptions)) 750 749 end 751 750
Note: See TracChangeset
for help on using the changeset viewer.