Changeset 249372 in webkit
- Timestamp:
- Sep 1, 2019 8:44:32 PM (5 years ago)
- Location:
- trunk/Source/JavaScriptCore
- Files:
-
- 16 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/ChangeLog
r249370 r249372 1 2019-09-01 Yusuke Suzuki <ysuzuki@apple.com> 2 3 [JSC] Merge op_check_traps into op_enter and op_loop_hint 4 https://bugs.webkit.org/show_bug.cgi?id=201373 5 6 Reviewed by Mark Lam. 7 8 This patch removes op_check_traps. Previously we were conditionally emitting op_check_traps based on Options and Platform configurations. 9 But now we are always emitting op_check_traps. So it is not necessary to have separate bytecode as op_check_traps. We can do checking in 10 op_enter and op_loop_hint. 11 12 While this patch moves check_traps implementation to op_enter and op_loop_hint, we keep separate DFG nodes (CheckTraps or InvalidationPoint), 13 since inserted nodes are different based on configurations and options. And emitting multiple DFG nodes from one bytecode is easy. 14 15 We also inline op_enter's slow path's write-barrier emission in LLInt. 16 17 * bytecode/BytecodeList.rb: 18 * bytecode/BytecodeUseDef.h: 19 (JSC::computeUsesForBytecodeOffset): 20 (JSC::computeDefsForBytecodeOffset): 21 * bytecompiler/BytecodeGenerator.cpp: 22 (JSC::BytecodeGenerator::BytecodeGenerator): 23 (JSC::BytecodeGenerator::emitLoopHint): 24 (JSC::BytecodeGenerator::emitCheckTraps): Deleted. 25 * bytecompiler/BytecodeGenerator.h: 26 * dfg/DFGByteCodeParser.cpp: 27 (JSC::DFG::ByteCodeParser::handleRecursiveTailCall): 28 (JSC::DFG::ByteCodeParser::parseBlock): 29 * dfg/DFGCapabilities.cpp: 30 (JSC::DFG::capabilityLevel): 31 * jit/JIT.cpp: 32 (JSC::JIT::privateCompileMainPass): 33 (JSC::JIT::privateCompileSlowCases): 34 (JSC::JIT::emitEnterOptimizationCheck): Deleted. 35 * jit/JIT.h: 36 * jit/JITOpcodes.cpp: 37 (JSC::JIT::emit_op_loop_hint): 38 (JSC::JIT::emitSlow_op_loop_hint): 39 (JSC::JIT::emit_op_enter): 40 (JSC::JIT::emitSlow_op_enter): 41 (JSC::JIT::emit_op_check_traps): Deleted. 42 (JSC::JIT::emitSlow_op_check_traps): Deleted. 43 * jit/JITOpcodes32_64.cpp: 44 (JSC::JIT::emit_op_enter): Deleted. 
45 * llint/LowLevelInterpreter.asm: 46 * llint/LowLevelInterpreter32_64.asm: 47 * llint/LowLevelInterpreter64.asm: 48 * runtime/CommonSlowPaths.cpp: 49 * runtime/CommonSlowPaths.h: 50 1 51 2019-09-01 Yusuke Suzuki <ysuzuki@apple.com> 2 52 -
trunk/Source/JavaScriptCore/bytecode/BytecodeList.rb
r248426 r249372 1093 1093 argument: VirtualRegister, 1094 1094 } 1095 1096 op :check_traps 1097 1095 1098 1096 op :log_shadow_chicken_prologue, -
trunk/Source/JavaScriptCore/bytecode/BytecodeUseDef.h
r248426 r249372 87 87 case op_create_cloned_arguments: 88 88 case op_get_rest_length: 89 case op_check_traps: 90 89 case op_get_argument: 91 90 case op_nop: … … 347 346 case op_put_to_arguments: 348 347 case op_set_function_name: 349 case op_check_traps: 350 348 case op_log_shadow_chicken_prologue: 351 349 case op_log_shadow_chicken_tail: -
trunk/Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp
r249337 r249372 353 353 allocateAndEmitScope(); 354 354 355 emitCheckTraps();356 357 355 const FunctionStack& functionStack = programNode->functionStack(); 358 356 … … 474 472 allocateAndEmitScope(); 475 473 476 emitCheckTraps();477 478 474 if (functionNameIsInScope(functionNode->ident(), functionNode->functionMode())) { 479 475 ASSERT(parseMode != SourceParseMode::GeneratorBodyMode); … … 883 879 allocateAndEmitScope(); 884 880 885 emitCheckTraps();886 887 881 for (FunctionMetadataNode* function : evalNode->functionStack()) { 888 882 m_codeBlock->addFunctionDecl(makeFunction(function)); … … 969 963 allocateAndEmitScope(); 970 964 971 emitCheckTraps();972 973 965 m_calleeRegister.setIndex(CallFrameSlot::callee); 974 966 … … 1398 1390 { 1399 1391 OpLoopHint::emit(this); 1400 emitCheckTraps();1401 1392 } 1402 1393 … … 1404 1395 { 1405 1396 OpJmp::emit(this, target.bind(this)); 1406 }1407 1408 void BytecodeGenerator::emitCheckTraps()1409 {1410 OpCheckTraps::emit(this);1411 1397 } 1412 1398 -
trunk/Source/JavaScriptCore/bytecompiler/BytecodeGenerator.h
r249337 r249372 848 848 849 849 void emitEnter(); 850 void emitCheckTraps(); 851 850 852 851 RegisterID* emitHasIndexedProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName); -
trunk/Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp
r249319 r249372 1442 1442 setDirect(stackEntry->remapOperand(virtualRegisterForLocal(i)), undefined, NormalSet); 1443 1443 1444 // We want to emit the SetLocals with an exit origin that points to the place we are jumping to.1445 1444 unsigned oldIndex = m_currentIndex; 1446 1445 auto oldStackTop = m_inlineStackTop; 1446 1447 // First, we emit check-traps operation pointing to bc#0 as exit. 1447 1448 m_inlineStackTop = stackEntry; 1449 m_currentIndex = 0; 1450 m_exitOK = true; 1451 addToGraph(Options::usePollingTraps() ? CheckTraps : InvalidationPoint); 1452 1453 // Then, we want to emit the SetLocals with an exit origin that points to the place we are jumping to. 1448 1454 m_currentIndex = opcodeLengths[op_enter]; 1449 1455 m_exitOK = true; … … 4780 4786 4781 4787 case op_enter: { 4788 addToGraph(Options::usePollingTraps() ? CheckTraps : InvalidationPoint); 4782 4789 Node* undefined = addToGraph(JSConstant, OpInfo(m_constantUndefined)); 4783 4790 // Initialize all locals to undefined. 4784 4791 for (int i = 0; i < m_inlineStackTop->m_codeBlock->numVars(); ++i) 4785 4792 set(virtualRegisterForLocal(i), undefined, ImmediateNakedSet); 4786 4787 4793 NEXT_OPCODE(op_enter); 4788 4794 } … … 6641 6647 6642 6648 addToGraph(LoopHint); 6649 addToGraph(Options::usePollingTraps() ? CheckTraps : InvalidationPoint); 6643 6650 NEXT_OPCODE(op_loop_hint); 6644 6651 } 6645 6652 6646 case op_check_traps: {6647 addToGraph(Options::usePollingTraps() ? CheckTraps : InvalidationPoint);6648 NEXT_OPCODE(op_check_traps);6649 }6650 6651 6653 case op_nop: { 6652 6654 addToGraph(Check); // We add a nop here so that basic block linking doesn't break. -
trunk/Source/JavaScriptCore/dfg/DFGCapabilities.cpp
r248426 r249372 206 206 case op_jbeloweq: 207 207 case op_loop_hint: 208 case op_check_traps: 209 208 case op_nop: 210 209 case op_ret: -
trunk/Source/JavaScriptCore/jit/JIT.cpp
r249175 r249372 91 91 { 92 92 } 93 94 #if ENABLE(DFG_JIT) 95 void JIT::emitEnterOptimizationCheck() 96 { 97 if (!canBeOptimized()) 98 return; 99 100 JumpList skipOptimize; 101 102 skipOptimize.append(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter()))); 103 ASSERT(!m_bytecodeOffset); 104 105 copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm().topEntryFrame); 106 107 callOperation(operationOptimize, m_bytecodeOffset); 108 skipOptimize.append(branchTestPtr(Zero, returnValueGPR)); 109 farJump(returnValueGPR, GPRInfo::callFrameRegister); 110 skipOptimize.link(this); 111 } 112 #endif 113 93 114 94 void JIT::emitNotifyWrite(WatchpointSet* set) … … 384 364 DEFINE_OP(op_jtrue) 385 365 DEFINE_OP(op_loop_hint) 386 DEFINE_OP(op_check_traps) 387 366 DEFINE_OP(op_nop) 388 367 DEFINE_OP(op_super_sampler_begin) … … 549 528 DEFINE_SLOWCASE_OP(op_jnstricteq) 550 529 DEFINE_SLOWCASE_OP(op_loop_hint) 551 DEFINE_SLOWCASE_OP(op_check_traps) 530 DEFINE_SLOWCASE_OP(op_enter) 552 531 DEFINE_SLOWCASE_OP(op_mod) 553 532 DEFINE_SLOWCASE_OP(op_mul) -
trunk/Source/JavaScriptCore/jit/JIT.h
r249175 r249372 576 576 void emit_op_jtrue(const Instruction*); 577 577 void emit_op_loop_hint(const Instruction*); 578 void emit_op_check_traps(const Instruction*); 579 578 void emit_op_nop(const Instruction*); 580 579 void emit_op_super_sampler_begin(const Instruction*); … … 674 673 void emitSlow_op_jtrue(const Instruction*, Vector<SlowCaseEntry>::iterator&); 675 674 void emitSlow_op_loop_hint(const Instruction*, Vector<SlowCaseEntry>::iterator&); 676 void emitSlow_op_check_traps(const Instruction*, Vector<SlowCaseEntry>::iterator&); 675 void emitSlow_op_enter(const Instruction*, Vector<SlowCaseEntry>::iterator&); 677 676 void emitSlow_op_mod(const Instruction*, Vector<SlowCaseEntry>::iterator&); 678 677 void emitSlow_op_mul(const Instruction*, Vector<SlowCaseEntry>::iterator&); … … 869 868 int jumpTarget(const Instruction*, int target); 870 869 871 #if ENABLE(DFG_JIT) 872 void emitEnterOptimizationCheck(); 873 #else 874 void emitEnterOptimizationCheck() { } 875 #endif 876 877 870 #ifndef NDEBUG 878 871 void printBytecodeOperandTypes(int src1, int src2);
trunk/Source/JavaScriptCore/jit/JITOpcodes.cpp
r249175 r249372 876 876 } 877 877 878 void JIT::emit_op_enter(const Instruction*)879 {880 // Even though CTI doesn't use them, we initialize our constant881 // registers to zap stale pointers, to avoid unnecessarily prolonging882 // object lifetime and increasing GC pressure.883 size_t count = m_codeBlock->numVars();884 for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)885 emitInitRegister(virtualRegisterForLocal(j).offset());886 887 emitWriteBarrier(m_codeBlock);888 889 emitEnterOptimizationCheck();890 }891 892 878 void JIT::emit_op_get_scope(const Instruction* currentInstruction) 893 879 { … … 1021 1007 void JIT::emit_op_loop_hint(const Instruction*) 1022 1008 { 1023 // Emit the JIT optimization check: 1009 // Check traps. 1010 addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress()))); 1011 #if ENABLE(DFG_JIT) 1012 // Emit the JIT optimization check: 1024 1013 if (canBeOptimized()) { 1025 1014 addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()), 1026 1015 AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter()))); 1027 1016 } 1017 #endif 1028 1018 } 1029 1019 1030 1020 void JIT::emitSlow_op_loop_hint(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) 1031 1021 { 1022 linkSlowCase(iter); 1023 callOperation(operationHandleTraps); 1032 1024 #if ENABLE(DFG_JIT) 1033 1025 // Emit the slow path for the JIT optimization check: 1034 1026 if (canBeOptimized()) { 1035 linkAllSlowCases(iter); 1027 emitJumpSlowToHot(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForLoop()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())), currentInstruction->size()); 1028 linkSlowCase(iter); 1036 1029 1037 1030 copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm().topEntryFrame); 1038 1031 1039 1032 callOperation(operationOptimize, m_bytecodeOffset); 1040 Jump noOptimizedEntry = branchTestPtr(Zero, 
returnValueGPR);1033 emitJumpSlowToHot(branchTestPtr(Zero, returnValueGPR), currentInstruction->size()); 1041 1034 if (!ASSERT_DISABLED) { 1042 1035 Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000)))); … … 1045 1038 } 1046 1039 farJump(returnValueGPR, GPRInfo::callFrameRegister); 1047 noOptimizedEntry.link(this);1048 1049 emitJumpSlowToHot(jump(), currentInstruction->size());1050 1040 } 1051 1041 #else 1052 1042 UNUSED_PARAM(currentInstruction); 1053 UNUSED_PARAM(iter);1054 1043 #endif 1055 1044 } 1056 1045 1057 void JIT::emit_op_check_traps(const Instruction*) 1058 { 1046 void JIT::emit_op_nop(const Instruction*) 1047 { 1048 } 1049 1050 void JIT::emit_op_super_sampler_begin(const Instruction*) 1051 { 1052 add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount))); 1053 } 1054 1055 void JIT::emit_op_super_sampler_end(const Instruction*) 1056 { 1057 sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount))); 1058 } 1059 1060 void JIT::emit_op_enter(const Instruction*) 1061 { 1062 // Even though JIT doesn't use them, we initialize our constant 1063 // registers to zap stale pointers, to avoid unnecessarily prolonging 1064 // object lifetime and increasing GC pressure. 1065 size_t count = m_codeBlock->numVars(); 1066 for (size_t i = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); i < count; ++i) 1067 emitInitRegister(virtualRegisterForLocal(i).offset()); 1068 1069 emitWriteBarrier(m_codeBlock); 1070 1071 // Check traps. 
1059 1072 addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress()))); 1060 } 1061 1062 void JIT::emit_op_nop(const Instruction*) 1063 { 1064 } 1065 1066 void JIT::emit_op_super_sampler_begin(const Instruction*) 1067 { 1068 add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount))); 1069 } 1070 1071 void JIT::emit_op_super_sampler_end(const Instruction*) 1072 { 1073 sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount))); 1074 } 1075 1076 void JIT::emitSlow_op_check_traps(const Instruction*, Vector<SlowCaseEntry>::iterator& iter) 1077 { 1078 linkAllSlowCases(iter); 1079 1073 1074 #if ENABLE(DFG_JIT) 1075 if (canBeOptimized()) 1076 addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter()))); 1077 #endif 1078 } 1079 1080 void JIT::emitSlow_op_enter(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) 1081 { 1082 linkSlowCase(iter); 1080 1083 callOperation(operationHandleTraps); 1084 #if ENABLE(DFG_JIT) 1085 if (canBeOptimized()) { 1086 emitJumpSlowToHot(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())), currentInstruction->size()); 1087 linkSlowCase(iter); 1088 1089 ASSERT(!m_bytecodeOffset); 1090 1091 copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm().topEntryFrame); 1092 1093 callOperation(operationOptimize, m_bytecodeOffset); 1094 emitJumpSlowToHot(branchTestPtr(Zero, returnValueGPR), currentInstruction->size()); 1095 farJump(returnValueGPR, GPRInfo::callFrameRegister); 1096 } 1097 #else 1098 UNUSED_PARAM(currentInstruction); 1099 #endif 1081 1100 } 1082 1101 -
trunk/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
r249175 r249372 1003 1003 } 1004 1004 1005 1006 void JIT::emit_op_enter(const Instruction* currentInstruction)1007 {1008 emitEnterOptimizationCheck();1009 1010 // Even though JIT code doesn't use them, we initialize our constant1011 // registers to zap stale pointers, to avoid unnecessarily prolonging1012 // object lifetime and increasing GC pressure.1013 for (int i = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); i < m_codeBlock->numVars(); ++i)1014 emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());1015 1016 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);1017 slowPathCall.call();1018 }1019 1020 1005 void JIT::emit_op_get_scope(const Instruction* currentInstruction) 1021 1006 { -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
r249362 r249372 1672 1672 1673 1673 llintOp(op_loop_hint, OpLoopHint, macro (unused, unused, dispatch) 1674 # CheckTraps. 1675 loadp CodeBlock[cfr], t1 1676 loadp CodeBlock::m_vm[t1], t1 1677 btbnz VM::m_traps + VMTraps::m_needTrapHandling[t1], .handleTraps 1678 .afterHandlingTraps: 1674 1679 checkSwitchToJITForLoop() 1675 1680 dispatch() 1676 end)1677 1678 1679 llintOp(op_check_traps, OpCheckTraps, macro (unused, unused, dispatch)1680 loadp CodeBlock[cfr], t11681 loadp CodeBlock::m_vm[t1], t11682 loadb VM::m_traps+VMTraps::m_needTrapHandling[t1], t01683 btpnz t0, .handleTraps1684 .afterHandlingTraps:1685 dispatch()1686 1681 .handleTraps: 1687 callTrapHandler( .throwHandler)1682 callTrapHandler(_llint_throw_from_slow_path_trampoline) 1688 1683 jmp .afterHandlingTraps 1689 .throwHandler:1690 jmp _llint_throw_from_slow_path_trampoline1691 1684 end) 1692 1685 -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm
r248829 r249372 547 547 end 548 548 549 macro writeBarrierOnOperand(size, get, cellFieldName) 550 get(cellFieldName, t1) 551 loadConstantOrVariablePayload(size, t1, CellTag, t2, .writeBarrierDone) 549 macro writeBarrierOnCellWithReload(cell, reloadAfterSlowPath) 552 550 skipIfIsRememberedOrInEden( 553 t2,551 cell, 554 552 macro() 555 553 push cfr, PC 556 554 # We make two extra slots because cCall2 will poke. 557 555 subp 8, sp 558 move t2, a1 # t2 can be a0 on x86556 move cell, a1 # cell can be a0 559 557 move cfr, a0 560 558 cCall2Void(_llint_write_barrier_slow) 561 559 addp 8, sp 562 560 pop PC, cfr 561 reloadAfterSlowPath() 563 562 end) 563 end 564 565 macro writeBarrierOnOperand(size, get, cellFieldName) 566 get(cellFieldName, t1) 567 loadConstantOrVariablePayload(size, t1, CellTag, t2, .writeBarrierDone) 568 writeBarrierOnCellWithReload(t2, macro() end) 564 569 .writeBarrierDone: 565 570 end … … 581 586 loadMacro(t3) 582 587 583 skipIfIsRememberedOrInEden( 584 t3, 585 macro() 586 push cfr, PC 587 # We make two extra slots because cCall2 will poke. 
588 subp 8, sp 589 move cfr, a0 590 move t3, a1 591 cCall2Void(_llint_write_barrier_slow) 592 addp 8, sp 593 pop PC, cfr 594 end) 588 writeBarrierOnCellWithReload(t3, macro() end) 595 589 .writeBarrierDone: 596 590 end … … 708 702 traceExecution() 709 703 checkStackPointerAlignment(t2, 0xdead00e1) 710 loadp CodeBlock[cfr], t 2 // t2<CodeBlock> = cfr.CodeBlock711 loadi CodeBlock::m_numVars[t 2], t2 // t2<size_t> = t2<CodeBlock>.m_numVars704 loadp CodeBlock[cfr], t1 // t1<CodeBlock> = cfr.CodeBlock 705 loadi CodeBlock::m_numVars[t1], t2 // t2<size_t> = t1<CodeBlock>.m_numVars 712 706 subi CalleeSaveSpaceAsVirtualRegisters, t2 713 707 move cfr, t3 … … 715 709 btiz t2, .opEnterDone 716 710 move UndefinedTag, t0 717 move 0, t1718 711 negi t2 719 712 .opEnterLoop: 720 713 storei t0, TagOffset[t3, t2, 8] 721 storei t1, PayloadOffset[t3, t2, 8]714 storei 0, PayloadOffset[t3, t2, 8] 722 715 addi 1, t2 723 716 btinz t2, .opEnterLoop 724 717 .opEnterDone: 725 callSlowPath(_slow_path_enter) 718 writeBarrierOnCellWithReload(t1, macro () 719 loadp CodeBlock[cfr], t1 # Reload CodeBlock 720 end) 721 # Checking traps. 722 loadp CodeBlock::m_vm[t1], t1 723 btpnz VM::m_traps + VMTraps::m_needTrapHandling[t1], .handleTraps 724 .afterHandlingTraps: 726 725 dispatchOp(narrow, op_enter) 727 726 .handleTraps: 727 callTrapHandler(_llint_throw_from_slow_path_trampoline) 728 jmp .afterHandlingTraps 728 729 729 730 llintOpWithProfile(op_get_argument, OpGetArgument, macro (size, get, dispatch, return) -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
r249184 r249372 511 511 end 512 512 513 macro writeBarrierOnOperandWithReload(size, get, cellFieldName, reloadAfterSlowPath) 514 get(cellFieldName, t1) 515 loadConstantOrVariableCell(size, t1, t2, .writeBarrierDone) 513 macro writeBarrierOnCellWithReload(cell, reloadAfterSlowPath) 516 514 skipIfIsRememberedOrInEden( 517 t2,515 cell, 518 516 macro() 519 517 push PB, PC 520 move t2, a1 # t2 can be a0 (not on 64 bits, but better safe than sorry)518 move cell, a1 # cell can be a0 521 519 move cfr, a0 522 520 cCall2Void(_llint_write_barrier_slow) … … 524 522 reloadAfterSlowPath() 525 523 end) 524 end 525 526 macro writeBarrierOnOperandWithReload(size, get, cellFieldName, reloadAfterSlowPath) 527 get(cellFieldName, t1) 528 loadConstantOrVariableCell(size, t1, t2, .writeBarrierDone) 529 writeBarrierOnCellWithReload(t2, reloadAfterSlowPath) 526 530 .writeBarrierDone: 527 531 end … … 546 550 547 551 loadMacro(t3) 548 skipIfIsRememberedOrInEden( 549 t3, 550 macro() 551 push PB, PC 552 move cfr, a0 553 move t3, a1 554 cCall2Void(_llint_write_barrier_slow) 555 pop PC, PB 556 end) 552 writeBarrierOnCellWithReload(t3, macro() end) 557 553 .writeBarrierDone: 558 554 end … … 687 683 traceExecution() 688 684 checkStackPointerAlignment(t2, 0xdead00e1) 689 loadp CodeBlock[cfr], t 2 // t2<CodeBlock> = cfr.CodeBlock690 loadi CodeBlock::m_numVars[t 2], t2 // t2<size_t> = t2<CodeBlock>.m_numVars685 loadp CodeBlock[cfr], t3 // t3<CodeBlock> = cfr.CodeBlock 686 loadi CodeBlock::m_numVars[t3], t2 // t2<size_t> = t3<CodeBlock>.m_numVars 691 687 subq CalleeSaveSpaceAsVirtualRegisters, t2 692 688 move cfr, t1 … … 701 697 btqnz t2, .opEnterLoop 702 698 .opEnterDone: 703 callSlowPath(_slow_path_enter) 699 writeBarrierOnCellWithReload(t3, macro () 700 loadp CodeBlock[cfr], t3 # Reload CodeBlock 701 end) 702 loadp CodeBlock::m_vm[t3], t1 703 btbnz VM::m_traps + VMTraps::m_needTrapHandling[t1], .handleTraps 704 .afterHandlingTraps: 704 705 dispatchOp(narrow, op_enter) 705 706 .handleTraps: 707 
callTrapHandler(_llint_throw_from_slow_path_trampoline) 708 jmp .afterHandlingTraps 706 709 707 710 llintOpWithProfile(op_get_argument, OpGetArgument, macro (size, get, dispatch, return) -
trunk/Source/JavaScriptCore/runtime/CommonSlowPaths.cpp
r249175 r249372 889 889 } 890 890 891 SLOW_PATH_DECL(slow_path_enter)892 {893 BEGIN();894 CodeBlock* codeBlock = exec->codeBlock();895 Heap::heap(codeBlock)->writeBarrier(codeBlock);896 END();897 }898 899 891 SLOW_PATH_DECL(slow_path_get_enumerable_length) 900 892 { -
trunk/Source/JavaScriptCore/runtime/CommonSlowPaths.h
r240254 r249372 324 324 SLOW_PATH_HIDDEN_DECL(slow_path_create_cloned_arguments); 325 325 SLOW_PATH_HIDDEN_DECL(slow_path_create_this); 326 SLOW_PATH_HIDDEN_DECL(slow_path_enter); 327 326 SLOW_PATH_HIDDEN_DECL(slow_path_get_callee); 328 327 SLOW_PATH_HIDDEN_DECL(slow_path_to_this);
Note: See TracChangeset
for help on using the changeset viewer.