Changeset 157457 in webkit


Timestamp:
Oct 15, 2013 11:33:04 AM
Author:
msaboff@apple.com
Message:

transition void cti_op_* methods to JIT operations.
https://bugs.webkit.org/show_bug.cgi?id=122617

Reviewed by Geoffrey Garen.

Converted the following stubs to JIT operations:

cti_handle_watchdog_timer
cti_op_debug
cti_op_pop_scope
cti_op_profile_did_call
cti_op_profile_will_call
cti_op_put_by_index
cti_op_put_getter_setter
cti_op_tear_off_activation
cti_op_tear_off_arguments
cti_op_throw_static_error
cti_optimize
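
A representative before/after for these conversions (excerpted from the jit/JITOpcodes.cpp hunks further down; the other stubs follow the same pattern): the legacy JITStubCall plumbing is replaced with a direct, typed callOperation() helper, which marshals its arguments via setupArgumentsWithExecState() and appends the call with an exception check (see jit/JITInlines.h).

    // Before: legacy CTI stub call (excerpt from emit_op_debug)
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();

    // After: typed JIT operation
    callOperation(operationDebug, currentInstruction[1].u.operand);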

  • dfg/DFGOperations.cpp:
  • dfg/DFGOperations.h:
  • jit/CCallHelpers.h:

(JSC::CCallHelpers::setupArgumentsWithExecState):
(JSC::CCallHelpers::setupThreeStubArgsGPR):
(JSC::CCallHelpers::setupStubArguments):
(JSC::CCallHelpers::setupStubArguments134):

  • jit/JIT.cpp:

(JSC::JIT::emitEnterOptimizationCheck):

  • jit/JIT.h:
  • jit/JITInlines.h:

(JSC::JIT::callOperation):

  • jit/JITOpcodes.cpp:

(JSC::JIT::emit_op_tear_off_activation):
(JSC::JIT::emit_op_tear_off_arguments):
(JSC::JIT::emit_op_push_with_scope):
(JSC::JIT::emit_op_pop_scope):
(JSC::JIT::emit_op_push_name_scope):
(JSC::JIT::emit_op_throw_static_error):
(JSC::JIT::emit_op_debug):
(JSC::JIT::emit_op_profile_will_call):
(JSC::JIT::emit_op_profile_did_call):
(JSC::JIT::emitSlow_op_loop_hint):

  • jit/JITOpcodes32_64.cpp:

(JSC::JIT::emit_op_push_with_scope):
(JSC::JIT::emit_op_pop_scope):
(JSC::JIT::emit_op_push_name_scope):
(JSC::JIT::emit_op_throw_static_error):
(JSC::JIT::emit_op_debug):
(JSC::JIT::emit_op_profile_will_call):
(JSC::JIT::emit_op_profile_did_call):

  • jit/JITOperations.cpp:
  • jit/JITOperations.h:
  • jit/JITPropertyAccess.cpp:

(JSC::JIT::emit_op_put_by_index):
(JSC::JIT::emit_op_put_getter_setter):

  • jit/JITPropertyAccess32_64.cpp:

(JSC::JIT::emit_op_put_by_index):
(JSC::JIT::emit_op_put_getter_setter):

  • jit/JITStubs.cpp:
  • jit/JITStubs.h:
Location:
trunk/Source/JavaScriptCore
Files:
15 edited

  • trunk/Source/JavaScriptCore/ChangeLog

    r157452 r157457  
     12013-10-14  Michael Saboff  <msaboff@apple.com>
     2
     3        transition void cti_op_* methods to JIT operations.
     4        https://bugs.webkit.org/show_bug.cgi?id=122617
     5
     6        Reviewed by Geoffrey Garen.
     7
     8        Converted the follow stubs to JIT operations:
     9            cti_handle_watchdog_timer
     10            cti_op_debug
     11            cti_op_pop_scope
     12            cti_op_profile_did_call
     13            cti_op_profile_will_call
     14            cti_op_put_by_index
     15            cti_op_put_getter_setter
     16            cti_op_tear_off_activation
     17            cti_op_tear_off_arguments
     18            cti_op_throw_static_error
     19            cti_optimize
     20
     21        * dfg/DFGOperations.cpp:
     22        * dfg/DFGOperations.h:
     23        * jit/CCallHelpers.h:
     24        (JSC::CCallHelpers::setupArgumentsWithExecState):
     25        (JSC::CCallHelpers::setupThreeStubArgsGPR):
     26        (JSC::CCallHelpers::setupStubArguments):
     27        (JSC::CCallHelpers::setupStubArguments134):
     28        * jit/JIT.cpp:
     29        (JSC::JIT::emitEnterOptimizationCheck):
     30        * jit/JIT.h:
     31        * jit/JITInlines.h:
     32        (JSC::JIT::callOperation):
     33        * jit/JITOpcodes.cpp:
     34        (JSC::JIT::emit_op_tear_off_activation):
     35        (JSC::JIT::emit_op_tear_off_arguments):
     36        (JSC::JIT::emit_op_push_with_scope):
     37        (JSC::JIT::emit_op_pop_scope):
     38        (JSC::JIT::emit_op_push_name_scope):
     39        (JSC::JIT::emit_op_throw_static_error):
     40        (JSC::JIT::emit_op_debug):
     41        (JSC::JIT::emit_op_profile_will_call):
     42        (JSC::JIT::emit_op_profile_did_call):
     43        (JSC::JIT::emitSlow_op_loop_hint):
     44        * jit/JITOpcodes32_64.cpp:
     45        (JSC::JIT::emit_op_push_with_scope):
     46        (JSC::JIT::emit_op_pop_scope):
     47        (JSC::JIT::emit_op_push_name_scope):
     48        (JSC::JIT::emit_op_throw_static_error):
     49        (JSC::JIT::emit_op_debug):
     50        (JSC::JIT::emit_op_profile_will_call):
     51        (JSC::JIT::emit_op_profile_did_call):
     52        * jit/JITOperations.cpp:
     53        * jit/JITOperations.h:
     54        * jit/JITPropertyAccess.cpp:
     55        (JSC::JIT::emit_op_put_by_index):
     56        (JSC::JIT::emit_op_put_getter_setter):
     57        * jit/JITPropertyAccess32_64.cpp:
     58        (JSC::JIT::emit_op_put_by_index):
     59        (JSC::JIT::emit_op_put_getter_setter):
     60        * jit/JITStubs.cpp:
     61        * jit/JITStubs.h:
     62
    1632013-10-15  Julien Brianceau  <jbriance@cisco.com>
    264
  • trunk/Source/JavaScriptCore/dfg/DFGOperations.cpp

    r157404 r157457  
    693693}
    694694
    695 void JIT_OPERATION operationTearOffArguments(ExecState* exec, JSCell* argumentsCell, JSCell* activationCell)
    696 {
    697     ASSERT(exec->codeBlock()->usesArguments());
    698     if (activationCell) {
    699         jsCast<Arguments*>(argumentsCell)->didTearOffActivation(exec, jsCast<JSActivation*>(activationCell));
    700         return;
    701     }
    702     jsCast<Arguments*>(argumentsCell)->tearOff(exec);
    703 }
    704 
    705695void JIT_OPERATION operationTearOffInlinedArguments(
    706696    ExecState* exec, JSCell* argumentsCell, JSCell* activationCell, InlineCallFrame* inlineCallFrame)
  • trunk/Source/JavaScriptCore/dfg/DFGOperations.h

    r157404 r157457  
    9292size_t JIT_OPERATION operationCompareStrictEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
    9393JSCell* JIT_OPERATION operationCreateInlinedArguments(ExecState*, InlineCallFrame*) WTF_INTERNAL;
    94 void JIT_OPERATION operationTearOffArguments(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
    9594void JIT_OPERATION operationTearOffInlinedArguments(ExecState*, JSCell*, JSCell*, InlineCallFrame*) WTF_INTERNAL;
    9695EncodedJSValue JIT_OPERATION operationGetArgumentsLength(ExecState*, int32_t) WTF_INTERNAL;
  • trunk/Source/JavaScriptCore/jit/CCallHelpers.h

    r157427 r157457  
    211211    }
    212212
     213    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, GPRReg arg2, TrustedImm32 arg3)
     214    {
     215        resetCallArguments();
     216        addCallArgument(GPRInfo::callFrameRegister);
     217        addCallArgument(arg1);
     218        addCallArgument(arg2);
     219        addCallArgument(arg3);
     220    }
     221
    213222    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, GPRReg arg2, TrustedImmPtr arg3)
    214223    {
     
    373382    }
    374383
    375     ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, GPRReg arg2, GPRReg arg3, GPRReg arg4, GPRReg arg5)
     384    ALWAYS_INLINE void setupArgumentsWithExecState(TrustedImmPtr arg1, GPRReg arg2, GPRReg arg3, TrustedImm32 arg4)
     385    {
     386        resetCallArguments();
     387        addCallArgument(GPRInfo::callFrameRegister);
     388        addCallArgument(arg1);
     389        addCallArgument(arg2);
     390        addCallArgument(arg3);
     391        addCallArgument(arg4);
     392    }
     393
     394    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, TrustedImmPtr arg2, GPRReg arg3, GPRReg arg4)
     395    {
     396        resetCallArguments();
     397        addCallArgument(GPRInfo::callFrameRegister);
     398        addCallArgument(arg1);
     399        addCallArgument(arg2);
     400        addCallArgument(arg3);
     401        addCallArgument(arg4);
     402    }
     403
     404ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, GPRReg arg2, GPRReg arg3, GPRReg arg4, GPRReg arg5)
    376405    {
    377406        resetCallArguments();
     
    384413    }
    385414
     415    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, GPRReg arg2, TrustedImm32 arg3, GPRReg arg4, GPRReg arg5)
     416    {
     417        resetCallArguments();
     418        addCallArgument(GPRInfo::callFrameRegister);
     419        addCallArgument(arg1);
     420        addCallArgument(arg2);
     421        addCallArgument(arg3);
     422        addCallArgument(arg4);
     423        addCallArgument(arg5);
     424    }
     425   
    386426    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, GPRReg arg2, GPRReg arg3, GPRReg arg4, TrustedImmPtr arg5)
    387427    {
     
    465505            swap(destA, destB);
    466506    }
     507
     508    template<GPRReg destA, GPRReg destB, GPRReg destC>
     509    void setupThreeStubArgsGPR(GPRReg srcA, GPRReg srcB, GPRReg srcC)
     510    {
     511        // If neither of srcB/srcC are in our way, then we can move srcA into place.
     512        // Then we can use setupTwoStubArgs to fix srcB/srcC.
     513        if (srcB != destA && srcC != destA) {
     514            move(srcA, destA);
     515            setupTwoStubArgsGPR<destB, destC>(srcB, srcC);
     516            return;
     517        }
     518       
     519        // If neither of srcA/srcC are in our way, then we can move srcB into place.
     520        // Then we can use setupTwoStubArgs to fix srcA/srcC.
     521        if (srcA != destB && srcC != destB) {
     522            move(srcB, destB);
     523            setupTwoStubArgsGPR<destA, destC>(srcA, srcC);
     524            return;
     525        }
     526       
     527        // If neither of srcA/srcB are in our way, then we can move srcC into place.
     528        // Then we can use setupTwoStubArgs to fix srcA/srcB.
     529        if (srcA != destC && srcB != destC) {
     530            move(srcC, destC);
     531            setupTwoStubArgsGPR<destA, destB>(srcA, srcB);
     532            return;
     533        }
     534       
     535        // If we get here, we haven't been able to move any of srcA/srcB/srcC.
     536        // Since all three are blocked, then all three must already be in the argument register.
     537        // But are they in the right ones?
     538       
     539        // First, ensure srcA is in place.
     540        if (srcA != destA) {
     541            swap(srcA, destA);
     542           
     543            // If srcA wasn't in argumentGPR1, one of srcB/srcC must be.
     544            ASSERT(srcB == destA || srcC == destA);
     545            // If srcB was in argumentGPR1 it no longer is (due to the swap).
     546            // Otherwise srcC must have been. Mark him as moved.
     547            if (srcB == destA)
     548                srcB = srcA;
     549            else
     550                srcC = srcA;
     551        }
     552       
     553        // Either srcB & srcC need swapping, or we're all done.
     554        ASSERT((srcB == destB || srcC == destC)
     555            || (srcB == destC || srcC == destB));
     556       
     557        if (srcB != destB)
     558            swap(destB, destC);
     559    }
     560
    467561#if CPU(X86_64)
    468562    template<FPRReg destA, FPRReg destB>
     
    517611        setupTwoStubArgsGPR<GPRInfo::argumentGPR1, GPRInfo::argumentGPR2>(arg1, arg2);
    518612    }
     613
    519614    void setupStubArguments(GPRReg arg1, GPRReg arg2, GPRReg arg3)
    520615    {
    521         // If neither of arg2/arg3 are in our way, then we can move arg1 into place.
    522         // Then we can use setupTwoStubArgs to fix arg2/arg3.
    523         if (arg2 != GPRInfo::argumentGPR1 && arg3 != GPRInfo::argumentGPR1) {
    524             move(arg1, GPRInfo::argumentGPR1);
    525             setupTwoStubArgsGPR<GPRInfo::argumentGPR2, GPRInfo::argumentGPR3>(arg2, arg3);
    526             return;
    527         }
    528 
    529         // If neither of arg1/arg3 are in our way, then we can move arg2 into place.
    530         // Then we can use setupTwoStubArgs to fix arg1/arg3.
    531         if (arg1 != GPRInfo::argumentGPR2 && arg3 != GPRInfo::argumentGPR2) {
    532             move(arg2, GPRInfo::argumentGPR2);
    533             setupTwoStubArgsGPR<GPRInfo::argumentGPR1, GPRInfo::argumentGPR3>(arg1, arg3);
    534             return;
    535         }
    536 
    537         // If neither of arg1/arg2 are in our way, then we can move arg3 into place.
    538         // Then we can use setupTwoStubArgs to fix arg1/arg2.
    539         if (arg1 != GPRInfo::argumentGPR3 && arg2 != GPRInfo::argumentGPR3) {
    540             move(arg3, GPRInfo::argumentGPR3);
    541             setupTwoStubArgsGPR<GPRInfo::argumentGPR1, GPRInfo::argumentGPR2>(arg1, arg2);
    542             return;
    543         }
    544 
    545         // If we get here, we haven't been able to move any of arg1/arg2/arg3.
    546         // Since all three are blocked, then all three must already be in the argument register.
    547         // But are they in the right ones?
    548 
    549         // First, ensure arg1 is in place.
    550         if (arg1 != GPRInfo::argumentGPR1) {
    551             swap(arg1, GPRInfo::argumentGPR1);
    552 
    553             // If arg1 wasn't in argumentGPR1, one of arg2/arg3 must be.
    554             ASSERT(arg2 == GPRInfo::argumentGPR1 || arg3 == GPRInfo::argumentGPR1);
    555             // If arg2 was in argumentGPR1 it no longer is (due to the swap).
    556             // Otherwise arg3 must have been. Mark him as moved.
    557             if (arg2 == GPRInfo::argumentGPR1)
    558                 arg2 = arg1;
    559             else
    560                 arg3 = arg1;
    561         }
    562 
    563         // Either arg2 & arg3 need swapping, or we're all done.
    564         ASSERT((arg2 == GPRInfo::argumentGPR2 || arg3 == GPRInfo::argumentGPR3)
    565             || (arg2 == GPRInfo::argumentGPR3 || arg3 == GPRInfo::argumentGPR2));
    566 
    567         if (arg2 != GPRInfo::argumentGPR2)
    568             swap(GPRInfo::argumentGPR2, GPRInfo::argumentGPR3);
    569     }
    570 
     616        setupThreeStubArgsGPR<GPRInfo::argumentGPR1, GPRInfo::argumentGPR2, GPRInfo::argumentGPR3>(arg1, arg2, arg3);
     617    }
     618
     619    void setupStubArguments134(GPRReg arg1, GPRReg arg3, GPRReg arg4)
     620    {
     621        setupThreeStubArgsGPR<GPRInfo::argumentGPR1, GPRInfo::argumentGPR3, GPRInfo::argumentGPR4>(arg1, arg3, arg4);
     622    }
    571623#if CPU(MIPS)
    572624#define POKE_ARGUMENT_OFFSET 4
     
    10401092    }
    10411093
     1094    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, TrustedImmPtr arg2, GPRReg arg3,  GPRReg arg4)
     1095    {
     1096        poke(arg4, POKE_ARGUMENT_OFFSET);
     1097        setupArgumentsWithExecState(arg1, arg2, arg3);
     1098    }
     1099
    10421100    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, TrustedImmPtr arg2, TrustedImm32 arg3, GPRReg arg4)
    10431101    {
     
    11901248        move(arg2, GPRInfo::argumentGPR2);
    11911249        move(arg3, GPRInfo::argumentGPR3);
     1250        move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
     1251    }
     1252
     1253    ALWAYS_INLINE void setupArgumentsWithExecState(GPRReg arg1, TrustedImmPtr arg2, GPRReg arg3, GPRReg arg4)
     1254    {
     1255        setupStubArguments134(arg1, arg3, arg4);
     1256        move(arg2, GPRInfo::argumentGPR2);
    11921257        move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
    11931258    }
  • trunk/Source/JavaScriptCore/jit/JIT.cpp

    r157411 r157457  
    105105        return;
    106106
    107     Jump skipOptimize = branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter()));
    108     JITStubCall stubCall(this, cti_optimize);
    109     stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
     107    JumpList skipOptimize;
     108   
     109    skipOptimize.append(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    110110    ASSERT(!m_bytecodeOffset);
    111     stubCall.call();
     111    callOperation(operationOptimize, m_bytecodeOffset);
     112    skipOptimize.append(branchTestPtr(Zero, returnValueRegister));
     113    jump(returnValueRegister);
    112114    skipOptimize.link(this);
    113115}
  • trunk/Source/JavaScriptCore/jit/JIT.h

    r157439 r157457  
    871871        MacroAssembler::Call callOperation(J_JITOperation_EZ, int, int32_t);
    872872        MacroAssembler::Call callOperation(P_JITOperation_EJS, GPRReg, size_t);
     873        MacroAssembler::Call callOperation(P_JITOperation_EZ, int32_t);
    873874        MacroAssembler::Call callOperation(S_JITOperation_ECC, RegisterID, RegisterID);
    874875        MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID);
    875876        MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID);
    876877        MacroAssembler::Call callOperation(S_JITOperation_EOJss, RegisterID, RegisterID);
     878        MacroAssembler::Call callOperation(V_JITOperation_E);
     879        MacroAssembler::Call callOperation(V_JITOperation_EC, RegisterID);
     880        MacroAssembler::Call callOperation(V_JITOperation_ECC, RegisterID, RegisterID);
     881        MacroAssembler::Call callOperation(V_JITOperation_ECICC, RegisterID, const Identifier*, RegisterID, RegisterID);
     882        MacroAssembler::Call callOperation(V_JITOperation_EIdJZ, const Identifier*, RegisterID, int32_t);
     883        MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID);
     884        MacroAssembler::Call callOperation(V_JITOperation_EJIdJJ, RegisterID, const Identifier*, RegisterID, RegisterID);
    877885#if USE(JSVALUE64)
    878886        MacroAssembler::Call callOperation(V_JITOperation_EJJI, RegisterID, RegisterID, StringImpl*);
     
    880888        MacroAssembler::Call callOperation(V_JITOperation_EJJI, RegisterID, RegisterID, RegisterID, RegisterID, StringImpl*);
    881889#endif
     890        MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, int32_t, RegisterID);
     891        MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, int32_t);
    882892        MacroAssembler::Call callOperation(V_JITOperation_EPc, Instruction*);
     893        MacroAssembler::Call callOperation(V_JITOperation_EZ, int32_t);
    883894        MacroAssembler::Call callOperationWithCallFrameRollbackOnException(J_JITOperation_E);
    884895        MacroAssembler::Call callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb, CodeBlock*);
     
    893904        MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID, RegisterID);
    894905        MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID, RegisterID, RegisterID);
     906        MacroAssembler::Call callOperation(V_JITOperation_EIdJZ, const Identifier*, RegisterID, RegisterID, int32_t);
     907        MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID, RegisterID);
     908        MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, RegisterID, int32_t);
     909        MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, RegisterID, int32_t, RegisterID, RegisterID);
    895910#endif
    896911
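
As a reading aid for the overloads added above: the operation typedef names encode the signature, with the letter before "_JITOperation" giving the return type and the letters after it giving the argument types (the meanings below are inferred from the typedefs and the type legend in the jit/JITOperations.h hunk later in this changeset; E = ExecState*, J = EncodedJSValue, Z = int32_t, C = JSCell*, Id = Identifier*). For example, the new V_JITOperation_EJZ overload corresponds to the typedef added in jit/JITOperations.h:

    typedef void JIT_OPERATION (*V_JITOperation_EJZ)(ExecState*, EncodedJSValue, int32_t);

and is dispatched from the baseline JIT as in emit_op_throw_static_error:

    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);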
  • trunk/Source/JavaScriptCore/jit/JITInlines.h

    r157439 r157457  
    337337}
    338338
     339ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EZ operation, int32_t op)
     340{
     341    setupArgumentsWithExecState(TrustedImm32(op));
     342    return appendCallWithExceptionCheck(operation);
     343}
     344
    339345ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1, size_t arg2)
    340346{
     
    367373}
    368374
     375ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_E operation)
     376{
     377    setupArgumentsExecState();
     378    return appendCallWithExceptionCheck(operation);
     379}
     380
     381ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EIdJZ operation, const Identifier* identOp1, RegisterID regOp2, int32_t op3)
     382{
     383    setupArgumentsWithExecState(TrustedImmPtr(identOp1), regOp2, TrustedImm32(op3));
     384    return appendCallWithExceptionCheck(operation);
     385}
     386
     387ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp)
     388{
     389    setupArgumentsWithExecState(regOp);
     390    return appendCallWithExceptionCheck(operation);
     391}
     392
     393ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, RegisterID regOp)
     394{
     395    setupArgumentsWithExecState(regOp);
     396    return appendCallWithExceptionCheck(operation);
     397}
     398
     399ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
     400{
     401    setupArgumentsWithExecState(regOp1, regOp2);
     402    return appendCallWithExceptionCheck(operation);
     403}
     404
     405ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
     406{
     407    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
     408    return appendCallWithExceptionCheck(operation);
     409}
     410
     411ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1, int32_t op2)
     412{
     413    setupArgumentsWithExecState(regOp1, TrustedImm32(op2));
     414    return appendCallWithExceptionCheck(operation);
     415}
     416
     417ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
     418{
     419    setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
     420    return appendCallWithExceptionCheck(operation);
     421}
     422
    369423ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EPc operation, Instruction* bytecodePC)
    370424{
    371425    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
     426    return appendCallWithExceptionCheck(operation);
     427}
     428
     429ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZ operation, int32_t op)
     430{
     431    setupArgumentsWithExecState(TrustedImm32(op));
    372432    return appendCallWithExceptionCheck(operation);
    373433}
     
    457517}
    458518
     519ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECICC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
     520{
     521    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
     522    return appendCallWithExceptionCheck(operation);
     523}
     524
     525ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload)
     526{
     527    setupArgumentsWithExecState(regOp1Payload, regOp1Tag);
     528    return appendCallWithExceptionCheck(operation);
     529}
     530
     531ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EIdJZ operation, const Identifier* identOp1, RegisterID regOp2Tag, RegisterID regOp2Payload, int32_t op3)
     532{
     533    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG TrustedImmPtr(identOp1), regOp2Payload, regOp2Tag, TrustedImm32(op3));
     534    return appendCallWithExceptionCheck(operation);
     535}
     536
    459537ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJI operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, StringImpl* uid)
    460538{
     
    463541}
    464542
     543ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2)
     544{
     545    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2));
     546    return appendCallWithExceptionCheck(operation);
     547}
     548
     549ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2, RegisterID regOp3Tag, RegisterID regOp3Payload)
     550{
     551    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2), regOp3Payload, regOp3Tag);
     552    return appendCallWithExceptionCheck(operation);
     553}
    465554#undef EABI_32BIT_DUMMY_ARG
    466555#undef SH4_32BIT_DUMMY_ARG
  • trunk/Source/JavaScriptCore/jit/JITOpcodes.cpp

    r157439 r157457  
    249249    int activation = currentInstruction[1].u.operand;
    250250    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    251     JITStubCall stubCall(this, cti_op_tear_off_activation);
    252     stubCall.addArgument(activation, regT2);
    253     stubCall.call();
     251    emitGetVirtualRegister(activation, regT0);
     252    callOperation(operationTearOffActivation, regT0);
    254253    activationNotCreated.link(this);
    255254}
     
    261260
    262261    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset())));
    263     JITStubCall stubCall(this, cti_op_tear_off_arguments);
    264     stubCall.addArgument(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT2);
    265     stubCall.addArgument(activation, regT2);
    266     stubCall.call();
     262    emitGetVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT0);
     263    emitGetVirtualRegister(activation, regT1);
     264    callOperation(operationTearOffArguments, regT0, regT1);
    267265    argsNotCreated.link(this);
    268266}
     
    601599void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
    602600{
    603     JITStubCall stubCall(this, cti_op_push_with_scope);
    604     stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    605     stubCall.call();
     601    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
     602    callOperation(operationPushWithScope, regT0);
    606603}
    607604
    608605void JIT::emit_op_pop_scope(Instruction*)
    609606{
    610     JITStubCall(this, cti_op_pop_scope).call();
     607    callOperation(operationPopScope);
    611608}
    612609
     
    664661void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
    665662{
    666     JITStubCall stubCall(this, cti_op_push_name_scope);
    667     stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    668     stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    669     stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    670     stubCall.call();
     663    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
     664    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT0, currentInstruction[3].u.operand);
    671665}
    672666
     
    730724void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
    731725{
    732     JITStubCall stubCall(this, cti_op_throw_static_error);
    733     if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
    734         stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    735     else
    736         stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    737     stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    738     stubCall.call();
     726    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
     727    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
    739728}
    740729
     
    745734    breakpoint();
    746735#else
    747     JITStubCall stubCall(this, cti_op_debug);
    748     stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    749     stubCall.call();
     736    callOperation(operationDebug, currentInstruction[1].u.operand);
    750737#endif
    751738}
     
    920907void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
    921908{
    922     JITStubCall stubCall(this, cti_op_profile_will_call);
    923     stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    924     stubCall.call();
     909    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
     910    callOperation(operationProfileWillCall, regT0);
    925911}
    926912
    927913void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
    928914{
    929     JITStubCall stubCall(this, cti_op_profile_did_call);
    930     stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    931     stubCall.call();
     915    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
     916    callOperation(operationProfileDidCall, regT0);
    932917}
    933918
     
    11691154        linkSlowCase(iter);
    11701155       
    1171         JITStubCall stubCall(this, cti_optimize);
    1172         stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
    1173         stubCall.call();
    1174        
     1156        callOperation(operationOptimize, m_bytecodeOffset);
     1157        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueRegister);
     1158        jump(returnValueRegister);
     1159        noOptimizedEntry.link(this);
     1160
    11751161        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    11761162    }
     
    11801166    if (m_vm->watchdog.isEnabled()) {
    11811167        linkSlowCase(iter);
    1182 
    1183         JITStubCall stubCall(this, cti_handle_watchdog_timer);
    1184         stubCall.call();
     1168        callOperation(operationHandleWatchdogTimer);
    11851169
    11861170        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
  • trunk/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp

    r157439 r157457  
    930930void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
    931931{
    932     JITStubCall stubCall(this, cti_op_push_with_scope);
    933     stubCall.addArgument(currentInstruction[1].u.operand);
    934     stubCall.call();
     932    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
     933    callOperation(operationPushWithScope, regT1, regT0);
    935934}
    936935
    937936void JIT::emit_op_pop_scope(Instruction*)
    938937{
    939     JITStubCall(this, cti_op_pop_scope).call();
     938    callOperation(operationPopScope);
    940939}
    941940
     
    969968void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
    970969{
    971     JITStubCall stubCall(this, cti_op_push_name_scope);
    972     stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    973     stubCall.addArgument(currentInstruction[2].u.operand);
    974     stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    975     stubCall.call();
     970    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
     971    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT1, regT0, currentInstruction[3].u.operand);
    976972}
    977973
     
    10421038void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
    10431039{
    1044     unsigned message = currentInstruction[1].u.operand;
    1045 
    1046     JITStubCall stubCall(this, cti_op_throw_static_error);
    1047     stubCall.addArgument(m_codeBlock->getConstant(message));
    1048     stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    1049     stubCall.call();
     1040    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
     1041    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
    10501042}
    10511043
     
    10561048    breakpoint();
    10571049#else
    1058     JITStubCall stubCall(this, cti_op_debug);
    1059     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    1060     stubCall.call();
     1050    callOperation(operationDebug, currentInstruction[1].u.operand);
    10611051#endif
    10621052}
     
    11801170void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
    11811171{
    1182     JITStubCall stubCall(this, cti_op_profile_will_call);
    1183     stubCall.addArgument(currentInstruction[1].u.operand);
    1184     stubCall.call();
     1172    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
     1173    callOperation(operationProfileWillCall, regT1, regT0);
    11851174}
    11861175
    11871176void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
    11881177{
    1189     JITStubCall stubCall(this, cti_op_profile_did_call);
    1190     stubCall.addArgument(currentInstruction[1].u.operand);
    1191     stubCall.call();
     1178    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
     1179    callOperation(operationProfileDidCall, regT1, regT0);
    11921180}
    11931181
  • trunk/Source/JavaScriptCore/jit/JITOperations.cpp

    r157439 r157457  
    3131#include "ArrayConstructor.h"
    3232#include "CommonSlowPaths.h"
     33#include "DFGCompilationMode.h"
     34#include "DFGDriver.h"
     35#include "DFGOSREntry.h"
     36#include "DFGWorklist.h"
    3337#include "Error.h"
    3438#include "GetterSetter.h"
    3539#include "HostCallReturnValue.h"
    3640#include "JITOperationWrappers.h"
     41#include "JITToDFGDeferredCompilationCallback.h"
    3742#include "JSGlobalObjectFunctions.h"
     43#include "JSNameScope.h"
    3844#include "JSPropertyNameIterator.h"
     45#include "JSWithScope.h"
    3946#include "ObjectConstructor.h"
    4047#include "Operations.h"
     
    797804}
    798805
     806void JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
     807{
     808    VM& vm = exec->vm();
     809    NativeCallFrameTracer tracer(&vm, exec);
     810
     811    if (UNLIKELY(vm.watchdog.didFire(exec)))
     812        vm.throwException(exec, createTerminatedExecutionException(&vm));
     813}
     814
     815void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
     816{
     817    VM& vm = exec->vm();
     818    NativeCallFrameTracer tracer(&vm, exec);
     819
     820    String message = errorDescriptionForValue(exec, JSValue::decode(encodedValue))->value(exec);
     821    if (referenceErrorFlag)
     822        vm.throwException(exec, createReferenceError(exec, message));
     823    else
     824        vm.throwException(exec, createTypeError(exec, message));
     825}
     826
     827void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
     828{
     829    VM& vm = exec->vm();
     830    NativeCallFrameTracer tracer(&vm, exec);
     831
     832    vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
     833}
     834
     835#if ENABLE(DFG_JIT)
     836char* JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
     837{
     838    VM& vm = exec->vm();
     839    NativeCallFrameTracer tracer(&vm, exec);
     840
     841    // Defer GC so that it doesn't run between when we enter into this slow path and
     842    // when we figure out the state of our code block. This prevents a number of
     843    // awkward reentrancy scenarios, including:
     844    //
     845    // - The optimized version of our code block being jettisoned by GC right after
     846    //   we concluded that we wanted to use it.
     847    //
     848    // - An optimized version of our code block being installed just as we decided
     849    //   that it wasn't ready yet.
     850    //
     851    // This still leaves the following: anytime we return from cti_optimize, we may
     852    // GC, and the GC may either jettison the optimized version of our code block,
     853    // or it may install the optimized version of our code block even though we
     854    // concluded that it wasn't ready yet.
     855    //
     856    // Note that jettisoning won't happen if we already initiated OSR, because in
     857    // that case we would have already planted the optimized code block into the JS
     858    // stack.
     859    DeferGC deferGC(vm.heap);
     860   
     861    CodeBlock* codeBlock = exec->codeBlock();
     862
     863    if (bytecodeIndex) {
     864        // If we're attempting to OSR from a loop, assume that this should be
     865        // separately optimized.
     866        codeBlock->m_shouldAlwaysBeInlined = false;
     867    }
     868
     869    if (Options::verboseOSR()) {
     870        dataLog(
     871            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
     872            ", executeCounter = ", codeBlock->jitExecuteCounter(),
     873            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
     874            ", exitCounter = ");
     875        if (codeBlock->hasOptimizedReplacement())
     876            dataLog(codeBlock->replacement()->osrExitCounter());
     877        else
     878            dataLog("N/A");
     879        dataLog("\n");
     880    }
     881
     882    if (!codeBlock->checkIfOptimizationThresholdReached()) {
     883        codeBlock->updateAllPredictions();
     884        if (Options::verboseOSR())
     885            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
     886        return 0;
     887    }
     888   
     889    if (codeBlock->m_shouldAlwaysBeInlined) {
     890        codeBlock->updateAllPredictions();
     891        codeBlock->optimizeAfterWarmUp();
     892        if (Options::verboseOSR())
     893            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
     894        return 0;
     895    }
     896
     897    // We cannot be in the process of asynchronous compilation and also have an optimized
     898    // replacement.
     899    ASSERT(
     900        !vm.worklist
     901        || !(vm.worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
     902        && codeBlock->hasOptimizedReplacement()));
     903
     904    DFG::Worklist::State worklistState;
     905    if (vm.worklist) {
     906        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
     907        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
     908        // what the result was so that we don't plow ahead and attempt OSR or immediate
     909        // reoptimization. This will have already also set the appropriate JIT execution
     910        // count threshold depending on what happened, so if the compilation was anything
     911        // but successful we just want to return early. See the case for worklistState ==
     912        // DFG::Worklist::Compiled, below.
     913       
     914        // Note that we could have alternatively just called Worklist::compilationState()
     915        // here, and if it returned Compiled, we could have then called
     916        // completeAndScheduleOSR() below. But that would have meant that it could take
     917        // longer for code blocks to be completed: they would only complete when *their*
     918        // execution count trigger fired; but that could take a while since the firing is
     919        // racy. It could also mean that code blocks that never run again after being
     920        // compiled would sit on the worklist until next GC. That's fine, but it's
     921        // probably a waste of memory. Our goal here is to complete code blocks as soon as
     922        // possible in order to minimize the chances of us executing baseline code after
     923        // optimized code is already available.
     924        worklistState = vm.worklist->completeAllReadyPlansForVM(
     925            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
     926    } else
     927        worklistState = DFG::Worklist::NotKnown;
     928
     929    if (worklistState == DFG::Worklist::Compiling) {
     930        // We cannot be in the process of asynchronous compilation and also have an optimized
     931        // replacement.
     932        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
     933        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
     934        return 0;
     935    }
     936
     937    if (worklistState == DFG::Worklist::Compiled) {
     938        // If we don't have an optimized replacement but we did just get compiled, then
     939        // the compilation failed or was invalidated, in which case the execution count
     940        // thresholds have already been set appropriately by
     941        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
     942        // nothing left to do.
     943        if (!codeBlock->hasOptimizedReplacement()) {
     944            codeBlock->updateAllPredictions();
     945            if (Options::verboseOSR())
     946                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
     947            return 0;
     948        }
     949    } else if (codeBlock->hasOptimizedReplacement()) {
     950        if (Options::verboseOSR())
     951            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
     952        // If we have an optimized replacement, then it must be the case that we entered
     953        // cti_optimize from a loop. That's because if there's an optimized replacement,
     954        // then all calls to this function will be relinked to the replacement and so
     955        // the prologue OSR will never fire.
     956       
     957        // This is an interesting threshold check. Consider that a function OSR exits
     958        // in the middle of a loop, while having a relatively low exit count. The exit
     959        // will reset the execution counter to some target threshold, meaning that this
     960        // code won't be reached until that loop heats up for >=1000 executions. But then
     961        // we do a second check here, to see if we should either reoptimize, or just
     962        // attempt OSR entry. Hence it might even be correct for
     963        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
     964        // additional checking anyway, to reduce the amount of recompilation thrashing.
     965        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
     966            if (Options::verboseOSR()) {
     967                dataLog(
     968                    "Triggering reoptimization of ", *codeBlock,
     969                    "(", *codeBlock->replacement(), ") (in loop).\n");
     970            }
     971            codeBlock->reoptimize();
     972            return 0;
     973        }
     974    } else {
     975        if (!codeBlock->shouldOptimizeNow()) {
     976            if (Options::verboseOSR()) {
     977                dataLog(
     978                    "Delaying optimization for ", *codeBlock,
     979                    " because of insufficient profiling.\n");
     980            }
     981            return 0;
     982        }
     983
     984        if (Options::verboseOSR())
     985            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
     986
     987        unsigned numVarsWithValues;
     988        if (bytecodeIndex)
     989            numVarsWithValues = codeBlock->m_numVars;
     990        else
     991            numVarsWithValues = 0;
     992        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
     993        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
     994            int operand = mustHandleValues.operandForIndex(i);
     995            if (operandIsArgument(operand)
     996                && !VirtualRegister(operand).toArgument()
     997                && codeBlock->codeType() == FunctionCode
     998                && codeBlock->specializationKind() == CodeForConstruct) {
     999                // Ugh. If we're in a constructor, the 'this' argument may hold garbage. It will
     1000                // also never be used. It doesn't matter what we put into the value for this,
     1001                // but it has to be an actual value that can be grokked by subsequent DFG passes,
     1002                // so we sanitize it here by turning it into Undefined.
     1003                mustHandleValues[i] = jsUndefined();
     1004            } else
     1005                mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
     1006        }
     1007
     1008        CompilationResult result = DFG::compile(
     1009            vm, codeBlock->newReplacement().get(), DFG::DFGMode, bytecodeIndex,
     1010            mustHandleValues, JITToDFGDeferredCompilationCallback::create(),
     1011            vm.ensureWorklist());
     1012       
     1013        if (result != CompilationSuccessful)
     1014            return 0;
     1015    }
     1016   
     1017    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
     1018    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
     1019   
     1020    if (void* address = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
     1021        if (Options::verboseOSR()) {
     1022            dataLog(
     1023                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ", address ",
     1024                RawPointer(OUR_RETURN_ADDRESS), " -> ", RawPointer(address), ".\n");
     1025        }
     1026
     1027        codeBlock->optimizeSoon();
     1028        return static_cast<char*>(address);
     1029    }
     1030
     1031    if (Options::verboseOSR()) {
     1032        dataLog(
     1033            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
     1034            " succeeded, OSR failed, after a delay of ",
     1035            codeBlock->optimizationDelayCounter(), ".\n");
     1036    }
     1037
     1038    // Count the OSR failure as a speculation failure. If this happens a lot, then
     1039    // reoptimize.
     1040    optimizedCodeBlock->countOSRExit();
     1041
     1042    // We are a lot more conservative about triggering reoptimization after OSR failure than
     1043    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
     1044    // already, then we really would like to reoptimize immediately. But this case covers
     1045    // something else: there weren't many (or any) speculation failures before, but we just
     1046    // failed to enter the speculative code because some variable had the wrong value or
     1047    // because the OSR code decided for any spurious reason that it did not want to OSR
     1048    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
     1049    // reoptimization trigger.
     1050    if (optimizedCodeBlock->shouldReoptimizeNow()) {
     1051        if (Options::verboseOSR()) {
     1052            dataLog(
     1053                "Triggering reoptimization of ", *codeBlock, " -> ",
     1054                *codeBlock->replacement(), " (after OSR fail).\n");
     1055        }
     1056        codeBlock->reoptimize();
     1057        return 0;
     1058    }
     1059
     1060    // OSR failed this time, but it might succeed next time! Let the code run a bit
     1061    // longer and then try again.
     1062    codeBlock->optimizeAfterWarmUp();
     1063   
     1064    return 0;
     1065}
     1066#endif
     1067
     1068void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
     1069{
     1070    VM& vm = exec->vm();
     1071    NativeCallFrameTracer tracer(&vm, exec);
     1072
     1073    JSValue arrayValue = JSValue::decode(encodedArrayValue);
     1074    ASSERT(isJSArray(arrayValue));
     1075    asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
     1076}
     1077
     1078#if USE(JSVALUE64)
     1079void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
     1080{
     1081    VM& vm = exec->vm();
     1082    NativeCallFrameTracer tracer(&vm, exec);
     1083
     1084    ASSERT(JSValue::decode(encodedObjectValue).isObject());
     1085    JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
     1086
     1087    GetterSetter* accessor = GetterSetter::create(vm);
     1088
     1089    JSValue getter = JSValue::decode(encodedGetterValue);
     1090    JSValue setter = JSValue::decode(encodedSetterValue);
     1091    ASSERT(getter.isObject() || getter.isUndefined());
     1092    ASSERT(setter.isObject() || setter.isUndefined());
     1093    ASSERT(getter.isObject() || setter.isObject());
     1094
     1095    if (!getter.isUndefined())
     1096        accessor->setGetter(vm, asObject(getter));
     1097    if (!setter.isUndefined())
     1098        accessor->setSetter(vm, asObject(setter));
     1099    baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
     1100}
     1101#else
     1102void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* getter, JSCell* setter)
     1103{
     1104    VM& vm = exec->vm();
     1105    NativeCallFrameTracer tracer(&vm, exec);
     1106
     1107    ASSERT(object && object->isObject());
     1108    JSObject* baseObj = object->getObject();
     1109
     1110    GetterSetter* accessor = GetterSetter::create(vm);
     1111
     1112    ASSERT(!getter || getter->isObject());
     1113    ASSERT(!setter || setter->isObject());
     1114    ASSERT(getter || setter);
     1115
     1116    if (getter)
     1117        accessor->setGetter(vm, getter->getObject());
     1118    if (setter)
     1119        accessor->setSetter(vm, setter->getObject());
     1120    baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
     1121}
     1122#endif
     1123
     1124void JIT_OPERATION operationPushNameScope(ExecState* exec, Identifier* identifier, EncodedJSValue encodedValue, int32_t attibutes)
     1125{
     1126    VM& vm = exec->vm();
     1127    NativeCallFrameTracer tracer(&vm, exec);
     1128
     1129    JSNameScope* scope = JSNameScope::create(exec, *identifier, JSValue::decode(encodedValue), attibutes);
     1130
     1131    exec->setScope(scope);
     1132}
     1133
     1134void JIT_OPERATION operationPushWithScope(ExecState* exec, EncodedJSValue encodedValue)
     1135{
     1136    VM& vm = exec->vm();
     1137    NativeCallFrameTracer tracer(&vm, exec);
     1138
     1139    JSObject* o = JSValue::decode(encodedValue).toObject(exec);
     1140    if (vm.exception())
     1141        return;
     1142
     1143    exec->setScope(JSWithScope::create(exec, o));
     1144}
     1145
     1146void JIT_OPERATION operationPopScope(ExecState* exec)
     1147{
     1148    VM& vm = exec->vm();
     1149    NativeCallFrameTracer tracer(&vm, exec);
     1150
     1151    exec->setScope(exec->scope()->next());
     1152}
     1153
     1154void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
     1155{
     1156    VM& vm = exec->vm();
     1157    NativeCallFrameTracer tracer(&vm, exec);
     1158
     1159    if (LegacyProfiler* profiler = vm.enabledProfiler())
     1160        profiler->didExecute(exec, JSValue::decode(encodedValue));
     1161}
     1162
     1163void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
     1164{
     1165    VM& vm = exec->vm();
     1166    NativeCallFrameTracer tracer(&vm, exec);
     1167
     1168    if (LegacyProfiler* profiler = vm.enabledProfiler())
     1169        profiler->willExecute(exec, JSValue::decode(encodedValue));
     1170}
     1171
    7991172EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
    8001173{
     
    8361209    ASSERT(!vm.exception());
    8371210    return result;
     1211}
     1212
     1213void JIT_OPERATION operationTearOffActivation(ExecState* exec, JSCell* activationCell)
     1214{
     1215    VM& vm = exec->vm();
     1216    NativeCallFrameTracer tracer(&vm, exec);
     1217
     1218    ASSERT(exec->codeBlock()->needsFullScopeChain());
     1219    jsCast<JSActivation*>(activationCell)->tearOff(vm);
     1220}
     1221
     1222void JIT_OPERATION operationTearOffArguments(ExecState* exec, JSCell* argumentsCell, JSCell* activationCell)
     1223{
     1224    ASSERT(exec->codeBlock()->usesArguments());
     1225    if (activationCell) {
     1226        jsCast<Arguments*>(argumentsCell)->didTearOffActivation(exec, jsCast<JSActivation*>(activationCell));
     1227        return;
     1228    }
     1229    jsCast<Arguments*>(argumentsCell)->tearOff(exec);
    8381230}
    8391231
  • trunk/Source/JavaScriptCore/jit/JITOperations.h

    r157451 r157457  
    6161    J: EncodedJSValue
    6262    Jcp: const JSValue*
     63    Jsa: JSActivation*
    6364    Jss: JSString*
    6465    O: JSObject*
     
    123124typedef size_t JIT_OPERATION (*S_JITOperation_J)(EncodedJSValue);
    124125typedef void JIT_OPERATION (*V_JITOperation_E)(ExecState*);
    125 typedef void JIT_OPERATION (*V_JITOperation_EOZD)(ExecState*, JSObject*, int32_t, double);
    126 typedef void JIT_OPERATION (*V_JITOperation_EOZJ)(ExecState*, JSObject*, int32_t, EncodedJSValue);
     126typedef void JIT_OPERATION (*V_JITOperation_EC)(ExecState*, JSCell*);
    127127typedef void JIT_OPERATION (*V_JITOperation_ECb)(ExecState*, CodeBlock*);
    128 typedef void JIT_OPERATION (*V_JITOperation_EC)(ExecState*, JSCell*);
     128typedef void JIT_OPERATION (*V_JITOperation_ECC)(ExecState*, JSCell*, JSCell*);
    129129typedef void JIT_OPERATION (*V_JITOperation_ECIcf)(ExecState*, JSCell*, InlineCallFrame*);
     130typedef void JIT_OPERATION (*V_JITOperation_ECICC)(ExecState*, JSCell*, Identifier*, JSCell*, JSCell*);
    130131typedef void JIT_OPERATION (*V_JITOperation_ECCIcf)(ExecState*, JSCell*, JSCell*, InlineCallFrame*);
    131132typedef void JIT_OPERATION (*V_JITOperation_ECJJ)(ExecState*, JSCell*, EncodedJSValue, EncodedJSValue);
    132133typedef void JIT_OPERATION (*V_JITOperation_ECZ)(ExecState*, JSCell*, int32_t);
    133134typedef void JIT_OPERATION (*V_JITOperation_ECC)(ExecState*, JSCell*, JSCell*);
     135typedef void JIT_OPERATION (*V_JITOperation_EIdJZ)(ExecState*, Identifier*, EncodedJSValue, int32_t);
     136typedef void JIT_OPERATION (*V_JITOperation_EJ)(ExecState*, EncodedJSValue);
     137typedef void JIT_OPERATION (*V_JITOperation_EJCI)(ExecState*, EncodedJSValue, JSCell*, StringImpl*);
     138typedef void JIT_OPERATION (*V_JITOperation_EJIdJJ)(ExecState*, EncodedJSValue, Identifier*, EncodedJSValue, EncodedJSValue);
    134139typedef void JIT_OPERATION (*V_JITOperation_EJJI)(ExecState*, EncodedJSValue, EncodedJSValue, StringImpl*);
    135140typedef void JIT_OPERATION (*V_JITOperation_EJJJ)(ExecState*, EncodedJSValue, EncodedJSValue, EncodedJSValue);
    136141typedef void JIT_OPERATION (*V_JITOperation_EJPP)(ExecState*, EncodedJSValue, void*, void*);
     142typedef void JIT_OPERATION (*V_JITOperation_EJZJ)(ExecState*, EncodedJSValue, int32_t, EncodedJSValue);
     143typedef void JIT_OPERATION (*V_JITOperation_EJZ)(ExecState*, EncodedJSValue, int32_t);
     144typedef void JIT_OPERATION (*V_JITOperation_EOZD)(ExecState*, JSObject*, int32_t, double);
     145typedef void JIT_OPERATION (*V_JITOperation_EOZJ)(ExecState*, JSObject*, int32_t, EncodedJSValue);
    137146typedef void JIT_OPERATION (*V_JITOperation_EPc)(ExecState*, Instruction*);
    138147typedef void JIT_OPERATION (*V_JITOperation_EPZJ)(ExecState*, void*, int32_t, EncodedJSValue);
    139148typedef void JIT_OPERATION (*V_JITOperation_W)(WatchpointSet*);
     149typedef void JIT_OPERATION (*V_JITOperation_EZ)(ExecState*, int32_t);
    140150typedef char* JIT_OPERATION (*P_JITOperation_E)(ExecState*);
    141151typedef char* JIT_OPERATION (*P_JITOperation_EC)(ExecState*, JSCell*);
     
    152162typedef char* JIT_OPERATION (*P_JITOperation_EStSS)(ExecState*, Structure*, size_t, size_t);
    153163typedef char* JIT_OPERATION (*P_JITOperation_EStZ)(ExecState*, Structure*, int32_t);
     164typedef char* JIT_OPERATION (*P_JITOperation_EZ)(ExecState*, int32_t);
    154165typedef char* JIT_OPERATION (*P_JITOperation_EZZ)(ExecState*, int32_t, int32_t);
    155166typedef StringImpl* JIT_OPERATION (*I_JITOperation_EJss)(ExecState*, JSString*);
     
    228239char* JIT_OPERATION operationVirtualConstruct(ExecState*) WTF_INTERNAL;
    229240char* JIT_OPERATION operationLinkConstruct(ExecState*) WTF_INTERNAL;
    230 size_t JIT_OPERATION operationCompareLess(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
    231 size_t JIT_OPERATION operationCompareLessEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
    232 size_t JIT_OPERATION operationCompareGreater(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
    233 size_t JIT_OPERATION operationCompareGreaterEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
     241size_t JIT_OPERATION operationCompareLess(ExecState*, EncodedJSValue, EncodedJSValue) WTF_INTERNAL;
     242size_t JIT_OPERATION operationCompareLessEq(ExecState*, EncodedJSValue, EncodedJSValue) WTF_INTERNAL;
     243size_t JIT_OPERATION operationCompareGreater(ExecState*, EncodedJSValue, EncodedJSValue) WTF_INTERNAL;
     244size_t JIT_OPERATION operationCompareGreaterEq(ExecState*, EncodedJSValue, EncodedJSValue) WTF_INTERNAL;
    234245size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState*, EncodedJSValue) WTF_INTERNAL;
    235246size_t JIT_OPERATION operationCompareEq(ExecState*, EncodedJSValue, EncodedJSValue) WTF_INTERNAL;
     
    246257JSCell* JIT_OPERATION operationNewObject(ExecState*, Structure*) WTF_INTERNAL;
    247258EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState*, void*) WTF_INTERNAL;
    248 
     259void JIT_OPERATION operationHandleWatchdogTimer(ExecState*) WTF_INTERNAL;
     260void JIT_OPERATION operationThrowStaticError(ExecState*, EncodedJSValue, int32_t) WTF_INTERNAL;
     261void JIT_OPERATION operationDebug(ExecState*, int32_t) WTF_INTERNAL;
     262#if ENABLE(DFG_JIT)
     263char* JIT_OPERATION operationOptimize(ExecState*, int32_t) WTF_INTERNAL;
     264#endif
     265void JIT_OPERATION operationPutByIndex(ExecState*, EncodedJSValue, int32_t, EncodedJSValue);
     266#if USE(JSVALUE64)
     267void JIT_OPERATION operationPutGetterSetter(ExecState*, EncodedJSValue, Identifier*, EncodedJSValue, EncodedJSValue) WTF_INTERNAL;
     268#else
     269void JIT_OPERATION operationPutGetterSetter(ExecState*, JSCell*, Identifier*, JSCell*, JSCell*) WTF_INTERNAL;
     270#endif
     271void JIT_OPERATION operationPushNameScope(ExecState*, Identifier*, EncodedJSValue, int32_t) WTF_INTERNAL;
     272void JIT_OPERATION operationPushWithScope(ExecState*, EncodedJSValue) WTF_INTERNAL;
     273void JIT_OPERATION operationPopScope(ExecState*) WTF_INTERNAL;
     274void JIT_OPERATION operationProfileDidCall(ExecState*, EncodedJSValue) WTF_INTERNAL;
     275void JIT_OPERATION operationProfileWillCall(ExecState*, EncodedJSValue) WTF_INTERNAL;
    249276EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState*, EncodedJSValue, EncodedJSValue baseVal) WTF_INTERNAL;
    250277JSCell* JIT_OPERATION operationCreateActivation(ExecState*, int32_t offset) WTF_INTERNAL;
    251278JSCell* JIT_OPERATION operationCreateArguments(ExecState*) WTF_INTERNAL;
     279void JIT_OPERATION operationTearOffActivation(ExecState*, JSCell*) WTF_INTERNAL;
     280void JIT_OPERATION operationTearOffArguments(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
    252281EncodedJSValue JIT_OPERATION operationDeleteById(ExecState*, EncodedJSValue base, const Identifier*) WTF_INTERNAL;
    253282JSCell* JIT_OPERATION operationGetPNames(ExecState*, JSObject*) WTF_INTERNAL;
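[Editor's note] The typedef names added above encode the C signature: the letter before the underscore is the return type and the letters after it are the parameters, per the legend at the top of this header (V: void, E: ExecState*, C: JSCell*, J: EncodedJSValue, Id: Identifier*, Z: int32_t, plus the newly added Jsa: JSActivation*). Reading one of the new entries as an example:

    // How the encoding expands (mapping taken from the legend in this header;
    // the comments are illustrative only, not an addition to the file):
    //   V  -> void return
    //   E  -> ExecState*
    //   Id -> Identifier*
    //   J  -> EncodedJSValue
    //   Z  -> int32_t
    typedef void JIT_OPERATION (*V_JITOperation_EIdJZ)(ExecState*, Identifier*, EncodedJSValue, int32_t);

    // operationPushNameScope has exactly this shape, so the JIT can select the
    // callOperation overload by typedef:
    void JIT_OPERATION operationPushNameScope(ExecState*, Identifier*, EncodedJSValue, int32_t) WTF_INTERNAL;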
  • trunk/Source/JavaScriptCore/jit/JITPropertyAccess.cpp

    r157439 r157457  
    490490void JIT::emit_op_put_by_index(Instruction* currentInstruction)
    491491{
    492     JITStubCall stubCall(this, cti_op_put_by_index);
    493     stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    494     stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    495     stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    496     stubCall.call();
     492    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
     493    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
     494    callOperation(operationPutByIndex, regT0, currentInstruction[2].u.operand, regT1);
    497495}
    498496
    499497void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
    500498{
    501     JITStubCall stubCall(this, cti_op_put_getter_setter);
    502     stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    503     stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    504     stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    505     stubCall.addArgument(currentInstruction[4].u.operand, regT2);
    506     stubCall.call();
     499    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
     500    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
     501    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
     502    callOperation(operationPutGetterSetter, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), regT1, regT2);
    507503}
    508504
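[Editor's note] The conversion pattern above repeats for every opcode in this patch: instead of building a JITStubCall and pushing arguments onto a stub frame, the emitter loads operands into registers with emitGetVirtualRegister and passes them, together with any immediates, to a typed callOperation. The matching JITInlines.h overload is not shown in this excerpt; the following is a hedged sketch of its likely shape, assuming the existing setupArgumentsWithExecState and appendCallWithExceptionCheck helpers.

    // Hypothetical sketch of the V_JITOperation_EJZJ overload (64-bit);
    // exact marshalling details are assumptions, not part of this changeset.
    ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
    {
        // Puts exec in the first argument register, then the value, index, and value.
        setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
        return appendCallWithExceptionCheck(operation);
    }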
  • trunk/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp

    r157439 r157457  
    5858    int property = currentInstruction[2].u.operand;
    5959    int value = currentInstruction[3].u.operand;
    60    
    61     JITStubCall stubCall(this, cti_op_put_by_index);
    62     stubCall.addArgument(base);
    63     stubCall.addArgument(TrustedImm32(property));
    64     stubCall.addArgument(value);
    65     stubCall.call();
     60
     61    emitLoad(base, regT1, regT0);
     62    emitLoad(value, regT3, regT2);
     63    callOperation(operationPutByIndex, regT1, regT0, property, regT3, regT2);
    6664}
    6765
     
    7270    int getter = currentInstruction[3].u.operand;
    7371    int setter = currentInstruction[4].u.operand;
    74    
    75     JITStubCall stubCall(this, cti_op_put_getter_setter);
    76     stubCall.addArgument(base);
    77     stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    78     stubCall.addArgument(getter);
    79     stubCall.addArgument(setter);
    80     stubCall.call();
     72
     73    emitLoadPayload(base, regT1);
     74    emitLoadPayload(getter, regT3);
     75    emitLoadPayload(setter, regT4);
     76    callOperation(operationPutGetterSetter, regT1, &m_codeBlock->identifier(property), regT3, regT4);
    8177}
    8278
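[Editor's note] On the JSVALUE32_64 ports an EncodedJSValue argument occupies a tag/payload register pair rather than a single GPR, which is why these emitters use emitLoad into two registers (or emitLoadPayload when only the cell payload is needed) and why each J parameter costs two register arguments at the callOperation site. A hedged sketch of the corresponding overload shape, with the payload-before-tag marshalling order and absence of EABI padding being assumptions:

    // Hypothetical sketch of the 32_64 overload for V_JITOperation_EJZJ;
    // not a hunk from this changeset.
    ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID op1Tag, RegisterID op1Payload, int32_t op2, RegisterID op3Tag, RegisterID op3Payload)
    {
        setupArgumentsWithExecState(op1Payload, op1Tag, TrustedImm32(op2), op3Payload, op3Tag);
        return appendCallWithExceptionCheck(operation);
    }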
  • trunk/Source/JavaScriptCore/jit/JITStubs.cpp

    r157439 r157457  
    369369#endif
    370370
    371 DEFINE_STUB_FUNCTION(void, handle_watchdog_timer)
    372 {
    373     STUB_INIT_STACK_FRAME(stackFrame);
    374     CallFrame* callFrame = stackFrame.callFrame;
    375     VM* vm = stackFrame.vm;
    376     if (UNLIKELY(vm->watchdog.didFire(callFrame))) {
    377         vm->throwException(callFrame, createTerminatedExecutionException(vm));
    378         VM_THROW_EXCEPTION_AT_END();
    379         return;
    380     }
    381 }
    382 
    383371DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
    384372{
     
    660648}
    661649
    662 #if ENABLE(DFG_JIT)
    663 DEFINE_STUB_FUNCTION(void, optimize)
    664 {
    665     STUB_INIT_STACK_FRAME(stackFrame);
    666    
    667     // Defer GC so that it doesn't run between when we enter into this slow path and
    668     // when we figure out the state of our code block. This prevents a number of
    669     // awkward reentrancy scenarios, including:
    670     //
    671     // - The optimized version of our code block being jettisoned by GC right after
    672     //   we concluded that we wanted to use it.
    673     //
    674     // - An optimized version of our code block being installed just as we decided
    675     //   that it wasn't ready yet.
    676     //
    677     // This still leaves the following: anytime we return from cti_optimize, we may
    678     // GC, and the GC may either jettison the optimized version of our code block,
    679     // or it may install the optimized version of our code block even though we
    680     // concluded that it wasn't ready yet.
    681     //
    682     // Note that jettisoning won't happen if we already initiated OSR, because in
    683     // that case we would have already planted the optimized code block into the JS
    684     // stack.
    685     DeferGC deferGC(stackFrame.vm->heap);
    686    
    687     CallFrame* callFrame = stackFrame.callFrame;
    688     CodeBlock* codeBlock = callFrame->codeBlock();
    689     VM& vm = callFrame->vm();
    690     unsigned bytecodeIndex = stackFrame.args[0].int32();
    691 
    692     if (bytecodeIndex) {
    693         // If we're attempting to OSR from a loop, assume that this should be
    694         // separately optimized.
    695         codeBlock->m_shouldAlwaysBeInlined = false;
    696     }
    697    
    698     if (Options::verboseOSR()) {
    699         dataLog(
    700             *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
    701             ", executeCounter = ", codeBlock->jitExecuteCounter(),
    702             ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
    703             ", exitCounter = ");
    704         if (codeBlock->hasOptimizedReplacement())
    705             dataLog(codeBlock->replacement()->osrExitCounter());
    706         else
    707             dataLog("N/A");
    708         dataLog("\n");
    709     }
    710 
    711     if (!codeBlock->checkIfOptimizationThresholdReached()) {
    712         codeBlock->updateAllPredictions();
    713         if (Options::verboseOSR())
    714             dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
    715         return;
    716     }
    717    
    718     if (codeBlock->m_shouldAlwaysBeInlined) {
    719         codeBlock->updateAllPredictions();
    720         codeBlock->optimizeAfterWarmUp();
    721         if (Options::verboseOSR())
    722             dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
    723         return;
    724     }
    725    
    726     // We cannot be in the process of asynchronous compilation and also have an optimized
    727     // replacement.
    728     ASSERT(
    729         !vm.worklist
    730         || !(vm.worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
    731              && codeBlock->hasOptimizedReplacement()));
    732    
    733     DFG::Worklist::State worklistState;
    734     if (vm.worklist) {
    735         // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
    736         // (i.e. compiled) code blocks. But if it completes ours, we also need to know
    737         // what the result was so that we don't plow ahead and attempt OSR or immediate
    738         // reoptimization. This will have already also set the appropriate JIT execution
    739         // count threshold depending on what happened, so if the compilation was anything
    740         // but successful we just want to return early. See the case for worklistState ==
    741         // DFG::Worklist::Compiled, below.
    742        
    743         // Note that we could have alternatively just called Worklist::compilationState()
    744         // here, and if it returned Compiled, we could have then called
    745         // completeAndScheduleOSR() below. But that would have meant that it could take
    746         // longer for code blocks to be completed: they would only complete when *their*
    747         // execution count trigger fired; but that could take a while since the firing is
    748         // racy. It could also mean that code blocks that never run again after being
    749         // compiled would sit on the worklist until next GC. That's fine, but it's
    750         // probably a waste of memory. Our goal here is to complete code blocks as soon as
    751         // possible in order to minimize the chances of us executing baseline code after
    752         // optimized code is already available.
    753        
    754         worklistState = vm.worklist->completeAllReadyPlansForVM(
    755             vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    756     } else
    757         worklistState = DFG::Worklist::NotKnown;
    758    
    759     if (worklistState == DFG::Worklist::Compiling) {
    760         // We cannot be in the process of asynchronous compilation and also have an optimized
    761         // replacement.
    762         RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
    763         codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
    764         return;
    765     }
    766    
    767     if (worklistState == DFG::Worklist::Compiled) {
    768         // If we don't have an optimized replacement but we did just get compiled, then
    769         // the compilation failed or was invalidated, in which case the execution count
    770         // thresholds have already been set appropriately by
    771         // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
    772         // nothing left to do.
    773         if (!codeBlock->hasOptimizedReplacement()) {
    774             codeBlock->updateAllPredictions();
    775             if (Options::verboseOSR())
    776                 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
    777             return;
    778         }
    779     } else if (codeBlock->hasOptimizedReplacement()) {
    780         if (Options::verboseOSR())
    781             dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
    782         // If we have an optimized replacement, then it must be the case that we entered
    783         // cti_optimize from a loop. That's because if there's an optimized replacement,
    784         // then all calls to this function will be relinked to the replacement and so
    785         // the prologue OSR will never fire.
    786        
    787         // This is an interesting threshold check. Consider that a function OSR exits
    788         // in the middle of a loop, while having a relatively low exit count. The exit
    789         // will reset the execution counter to some target threshold, meaning that this
    790         // code won't be reached until that loop heats up for >=1000 executions. But then
    791         // we do a second check here, to see if we should either reoptimize, or just
    792         // attempt OSR entry. Hence it might even be correct for
    793         // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
    794         // additional checking anyway, to reduce the amount of recompilation thrashing.
    795         if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
    796             if (Options::verboseOSR()) {
    797                 dataLog(
    798                     "Triggering reoptimization of ", *codeBlock,
    799                     "(", *codeBlock->replacement(), ") (in loop).\n");
    800             }
    801             codeBlock->reoptimize();
    802             return;
    803         }
    804     } else {
    805         if (!codeBlock->shouldOptimizeNow()) {
    806             if (Options::verboseOSR()) {
    807                 dataLog(
    808                     "Delaying optimization for ", *codeBlock,
    809                     " because of insufficient profiling.\n");
    810             }
    811             return;
    812         }
    813        
    814         if (Options::verboseOSR())
    815             dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
    816        
    817         unsigned numVarsWithValues;
    818         if (bytecodeIndex)
    819             numVarsWithValues = codeBlock->m_numVars;
    820         else
    821             numVarsWithValues = 0;
    822         Operands<JSValue> mustHandleValues(
    823             codeBlock->numParameters(), numVarsWithValues);
    824         for (size_t i = 0; i < mustHandleValues.size(); ++i) {
    825             int operand = mustHandleValues.operandForIndex(i);
    826             if (operandIsArgument(operand)
    827                 && !VirtualRegister(operand).toArgument()
    828                 && codeBlock->codeType() == FunctionCode
    829                 && codeBlock->specializationKind() == CodeForConstruct) {
    830                 // Ugh. If we're in a constructor, the 'this' argument may hold garbage. It will
    831                 // also never be used. It doesn't matter what we put into the value for this,
    832                 // but it has to be an actual value that can be grokked by subsequent DFG passes,
    833                 // so we sanitize it here by turning it into Undefined.
    834                 mustHandleValues[i] = jsUndefined();
    835             } else
    836                 mustHandleValues[i] = callFrame->uncheckedR(operand).jsValue();
    837         }
    838        
    839         CompilationResult result = DFG::compile(
    840             vm, codeBlock->newReplacement().get(), DFG::DFGMode, bytecodeIndex,
    841             mustHandleValues, JITToDFGDeferredCompilationCallback::create(),
    842             vm.ensureWorklist());
    843        
    844         if (result != CompilationSuccessful)
    845             return;
    846     }
    847    
    848     CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    849     ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    850    
    851     if (void* address = DFG::prepareOSREntry(callFrame, optimizedCodeBlock, bytecodeIndex)) {
    852         if (Options::verboseOSR()) {
    853             dataLog(
    854                 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ", address ",
    855                 RawPointer((STUB_RETURN_ADDRESS).value()), " -> ", RawPointer(address), ".\n");
    856         }
    857 
    858         codeBlock->optimizeSoon();
    859         STUB_SET_RETURN_ADDRESS(address);
    860         return;
    861     }
    862 
    863     if (Options::verboseOSR()) {
    864         dataLog(
    865             "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
    866             " succeeded, OSR failed, after a delay of ",
    867             codeBlock->optimizationDelayCounter(), ".\n");
    868     }
    869 
    870     // Count the OSR failure as a speculation failure. If this happens a lot, then
    871     // reoptimize.
    872     optimizedCodeBlock->countOSRExit();
    873    
    874     // We are a lot more conservative about triggering reoptimization after OSR failure than
    875     // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    876     // already, then we really would like to reoptimize immediately. But this case covers
    877     // something else: there weren't many (or any) speculation failures before, but we just
    878     // failed to enter the speculative code because some variable had the wrong value or
    879     // because the OSR code decided for any spurious reason that it did not want to OSR
    880     // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    881     // reoptimization trigger.
    882     if (optimizedCodeBlock->shouldReoptimizeNow()) {
    883         if (Options::verboseOSR()) {
    884             dataLog(
    885                 "Triggering reoptimization of ", *codeBlock, " -> ",
    886                 *codeBlock->replacement(), " (after OSR fail).\n");
    887         }
    888         codeBlock->reoptimize();
    889         return;
    890     }
    891 
    892     // OSR failed this time, but it might succeed next time! Let the code run a bit
    893     // longer and then try again.
    894     codeBlock->optimizeAfterWarmUp();
    895 }
    896 #endif // ENABLE(DFG_JIT)
    897 
    898650DEFINE_STUB_FUNCTION(void, op_tear_off_activation)
    899651{
     
    916668    }
    917669    arguments->tearOff(callFrame);
    918 }
    919 
    920 DEFINE_STUB_FUNCTION(void, op_profile_will_call)
    921 {
    922     STUB_INIT_STACK_FRAME(stackFrame);
    923 
    924     if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
    925         profiler->willExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
    926 }
    927 
    928 DEFINE_STUB_FUNCTION(void, op_profile_did_call)
    929 {
    930     STUB_INIT_STACK_FRAME(stackFrame);
    931 
    932     if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
    933         profiler->didExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
    934670}
    935671
     
    1148884}
    1149885
    1150 DEFINE_STUB_FUNCTION(void, op_push_with_scope)
    1151 {
    1152     STUB_INIT_STACK_FRAME(stackFrame);
    1153 
    1154     JSObject* o = stackFrame.args[0].jsValue().toObject(stackFrame.callFrame);
    1155     CHECK_FOR_EXCEPTION_VOID();
    1156     stackFrame.callFrame->setScope(JSWithScope::create(stackFrame.callFrame, o));
    1157 }
    1158 
    1159 DEFINE_STUB_FUNCTION(void, op_pop_scope)
    1160 {
    1161     STUB_INIT_STACK_FRAME(stackFrame);
    1162 
    1163     stackFrame.callFrame->setScope(stackFrame.callFrame->scope()->next());
    1164 }
    1165 
    1166 DEFINE_STUB_FUNCTION(void, op_push_name_scope)
    1167 {
    1168     STUB_INIT_STACK_FRAME(stackFrame);
    1169 
    1170     JSNameScope* scope = JSNameScope::create(stackFrame.callFrame, stackFrame.args[0].identifier(), stackFrame.args[1].jsValue(), stackFrame.args[2].int32());
    1171 
    1172     CallFrame* callFrame = stackFrame.callFrame;
    1173     callFrame->setScope(scope);
    1174 }
    1175 
    1176 DEFINE_STUB_FUNCTION(void, op_put_by_index)
    1177 {
    1178     STUB_INIT_STACK_FRAME(stackFrame);
    1179 
    1180     CallFrame* callFrame = stackFrame.callFrame;
    1181     unsigned property = stackFrame.args[1].int32();
    1182 
    1183     JSValue arrayValue = stackFrame.args[0].jsValue();
    1184     ASSERT(isJSArray(arrayValue));
    1185     asArray(arrayValue)->putDirectIndex(callFrame, property, stackFrame.args[2].jsValue());
    1186 }
    1187 
    1188 DEFINE_STUB_FUNCTION(void, op_put_getter_setter)
    1189 {
    1190     STUB_INIT_STACK_FRAME(stackFrame);
    1191 
    1192     CallFrame* callFrame = stackFrame.callFrame;
    1193 
    1194     ASSERT(stackFrame.args[0].jsValue().isObject());
    1195     JSObject* baseObj = asObject(stackFrame.args[0].jsValue());
    1196 
    1197     VM& vm = callFrame->vm();
    1198     GetterSetter* accessor = GetterSetter::create(vm);
    1199 
    1200     JSValue getter = stackFrame.args[2].jsValue();
    1201     JSValue setter = stackFrame.args[3].jsValue();
    1202     ASSERT(getter.isObject() || getter.isUndefined());
    1203     ASSERT(setter.isObject() || setter.isUndefined());
    1204     ASSERT(getter.isObject() || setter.isObject());
    1205 
    1206     if (!getter.isUndefined())
    1207         accessor->setGetter(vm, asObject(getter));
    1208     if (!setter.isUndefined())
    1209         accessor->setSetter(vm, asObject(setter));
    1210     baseObj->putDirectAccessor(callFrame, stackFrame.args[1].identifier(), accessor, Accessor);
    1211 }
    1212 
    1213886DEFINE_STUB_FUNCTION(void, op_throw_static_error)
    1214887{
     
    1224897}
    1225898
    1226 DEFINE_STUB_FUNCTION(void, op_debug)
    1227 {
    1228     STUB_INIT_STACK_FRAME(stackFrame);
    1229 
    1230     CallFrame* callFrame = stackFrame.callFrame;
    1231 
    1232     int debugHookID = stackFrame.args[0].int32();
    1233     stackFrame.vm->interpreter->debug(callFrame, static_cast<DebugHookID>(debugHookID));
    1234 }
    1235 
    1236899DEFINE_STUB_FUNCTION(void*, vm_throw)
    1237900{
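[Editor's note] Every stub deleted above has a JIT-operation counterpart added to JITOperations.cpp, of which only operationTearOffActivation and operationTearOffArguments appear in this excerpt. The logic carries over essentially unchanged; the STUB_INIT_STACK_FRAME unpacking is replaced by real parameters plus a NativeCallFrameTracer. For instance, based on the removed handle_watchdog_timer stub, the new operationHandleWatchdogTimer plausibly looks like the reconstruction below.

    // Hypothetical reconstruction of the JITOperations.cpp replacement for the
    // removed handle_watchdog_timer stub; not one of the hunks shown here.
    void JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
    {
        VM& vm = exec->vm();
        NativeCallFrameTracer tracer(&vm, exec);

        // Same check the old stub performed: if the watchdog fired, terminate
        // execution by throwing; the JIT's exception check after the call
        // routes control to the handler.
        if (UNLIKELY(vm.watchdog.didFire(exec)))
            vm.throwException(exec, createTerminatedExecutionException(&vm));
    }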
  • trunk/Source/JavaScriptCore/jit/JITStubs.h

    r157439 r157457  
    344344EncodedJSValue JIT_STUB cti_op_get_by_val_generic(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    345345EncodedJSValue JIT_STUB cti_op_get_by_val_string(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    346 void JIT_STUB cti_op_push_name_scope(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    347 void JIT_STUB cti_op_push_with_scope(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    348 void JIT_STUB cti_handle_watchdog_timer(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    349 void JIT_STUB cti_op_debug(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    350 void JIT_STUB cti_op_pop_scope(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    351 void JIT_STUB cti_op_profile_did_call(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    352 void JIT_STUB cti_op_profile_will_call(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    353 void JIT_STUB cti_op_put_by_index(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    354346void JIT_STUB cti_op_put_by_val(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    355347void JIT_STUB cti_op_put_by_val_generic(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    356 void JIT_STUB cti_op_put_getter_setter(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    357348void JIT_STUB cti_op_tear_off_activation(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    358349void JIT_STUB cti_op_tear_off_arguments(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    359350void JIT_STUB cti_op_throw_static_error(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    360 #if ENABLE(DFG_JIT)
    361 void JIT_STUB cti_optimize(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    362 #endif
    363351void* JIT_STUB cti_op_throw(STUB_ARGS_DECLARATION) WTF_INTERNAL;
    364352void* JIT_STUB cti_vm_throw(STUB_ARGS_DECLARATION) REFERENCED_FROM_ASM WTF_INTERNAL;