Changeset 223989 in webkit


Ignore:
Timestamp:
Oct 25, 2017 3:15:39 PM (6 years ago)
Author:
commit-queue@webkit.org
Message:

Unreviewed, rolling out r223691 and r223729.
https://bugs.webkit.org/show_bug.cgi?id=178834

Broke Speedometer 2 React-Redux-TodoMVC test case (Requested
by rniwa on #webkit).

Reverted changesets:

"Turn recursive tail calls into loops"
https://bugs.webkit.org/show_bug.cgi?id=176601
https://trac.webkit.org/changeset/223691

"REGRESSION(r223691): DFGByteCodeParser.cpp:1483:83: warning:
comparison is always false due to limited range of data type
[-Wtype-limits]"
https://bugs.webkit.org/show_bug.cgi?id=178543
https://trac.webkit.org/changeset/223729

Location:
trunk
Files:
1 deleted
8 edited

Legend:

Unmodified
Added
Removed
  • trunk/JSTests/ChangeLog

    r223961 r223989  
     12017-10-25  Commit Queue  <commit-queue@webkit.org>
     2
     3        Unreviewed, rolling out r223691 and r223729.
     4        https://bugs.webkit.org/show_bug.cgi?id=178834
     5
     6        Broke Speedometer 2 React-Redux-TodoMVC test case (Requested
     7        by rniwa on #webkit).
     8
     9        Reverted changesets:
     10
     11        "Turn recursive tail calls into loops"
     12        https://bugs.webkit.org/show_bug.cgi?id=176601
     13        https://trac.webkit.org/changeset/223691
     14
     15        "REGRESSION(r223691): DFGByteCodeParser.cpp:1483:83: warning:
     16        comparison is always false due to limited range of data type
     17        [-Wtype-limits]"
     18        https://bugs.webkit.org/show_bug.cgi?id=178543
     19        https://trac.webkit.org/changeset/223729
     20
    1212017-10-25  Ryan Haddad  <ryanhaddad@apple.com>
    222
  • trunk/Source/JavaScriptCore/ChangeLog

    r223980 r223989  
     12017-10-25  Commit Queue  <commit-queue@webkit.org>
     2
     3        Unreviewed, rolling out r223691 and r223729.
     4        https://bugs.webkit.org/show_bug.cgi?id=178834
     5
     6        Broke Speedometer 2 React-Redux-TodoMVC test case (Requested
     7        by rniwa on #webkit).
     8
     9        Reverted changesets:
     10
     11        "Turn recursive tail calls into loops"
     12        https://bugs.webkit.org/show_bug.cgi?id=176601
     13        https://trac.webkit.org/changeset/223691
     14
     15        "REGRESSION(r223691): DFGByteCodeParser.cpp:1483:83: warning:
     16        comparison is always false due to limited range of data type
     17        [-Wtype-limits]"
     18        https://bugs.webkit.org/show_bug.cgi?id=178543
     19        https://trac.webkit.org/changeset/223729
     20
    1212017-10-25  Michael Saboff  <msaboff@apple.com>
    222
  • trunk/Source/JavaScriptCore/bytecode/CodeBlock.h

    r223691 r223989  
    743743    // Call this to cause an optimization trigger to fire soon, but
    744744    // not necessarily the next one. This makes sense if optimization
    745     // succeeds. Successful optimization means that all calls are
     745    // succeeds. Successful optimization means that all calls are
    746746    // relinked to the optimized code, so this only affects call
    747747    // frames that are still executing this CodeBlock. The value here
     
    925925    std::optional<CodeOrigin> findPC(void* pc);
    926926#endif
    927 
    928     bool hasTailCalls() const { return m_unlinkedCode->hasTailCalls(); }
    929927
    930928protected:
  • trunk/Source/JavaScriptCore/bytecode/PreciseJumpTargets.cpp

    r223691 r223989  
    4343    if (opcodeID == op_loop_hint)
    4444        out.append(bytecodeOffset);
    45     else if (opcodeID == op_enter && codeBlock->hasTailCalls()) {
    46         // We need to insert a jump after op_enter, so recursive tail calls have somewhere to jump to.
    47         // But we only want to pay that price for functions that have at least one tail call.
    48         out.append(bytecodeOffset + opcodeLengths[op_enter]);
    49     }
    5045}
    5146
     
    5954{
    6055    ASSERT(out.isEmpty());
    61 
    62     // The code block has a superset of the jump targets. So if it claims to have none, we are done.
     56   
     57    // We will derive a superset of the jump targets that the code block thinks it has.
     58    // So, if the code block claims there are none, then we are done.
    6359    if (Mode == ComputePreciseJumpTargetsMode::FollowCodeBlockClaim && !codeBlock->numberOfJumpTargets())
    6460        return;
  • trunk/Source/JavaScriptCore/bytecode/UnlinkedCodeBlock.cpp

    r223691 r223989  
    7171    , m_derivedContextType(static_cast<unsigned>(info.derivedContextType()))
    7272    , m_evalContextType(static_cast<unsigned>(info.evalContextType()))
    73     , m_hasTailCalls(false)
    7473    , m_lineCount(0)
    7574    , m_endColumn(UINT_MAX)
  • trunk/Source/JavaScriptCore/bytecode/UnlinkedCodeBlock.h

    r223691 r223989  
    130130    bool isArrowFunctionContext() const { return m_isArrowFunctionContext; }
    131131    bool isClassContext() const { return m_isClassContext; }
    132     bool hasTailCalls() const { return m_hasTailCalls; }
    133     void setHasTailCalls() { m_hasTailCalls = true; }
    134132
    135133    void addExpressionInfo(unsigned instructionOffset, int divot,
     
    445443    unsigned m_derivedContextType : 2;
    446444    unsigned m_evalContextType : 2;
    447     unsigned m_hasTailCalls : 1;
    448445    unsigned m_lineCount;
    449446    unsigned m_endColumn;
  • trunk/Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp

    r223691 r223989  
    13141314{
    13151315    emitOpcode(op_enter);
    1316 
    1317     // We must add the end of op_enter as a potential jump target, because the bytecode parser may decide to split its basic block
    1318     // to have somewhere to jump to if there is a recursive tail-call that points to this function.
    1319     m_codeBlock->addJumpTarget(instructions().size());
    1320     // This disables peephole optimizations when an instruction is a jump target
    1321     m_lastOpcodeID = op_end;
    13221316}
    13231317
     
    33543348RegisterID* BytecodeGenerator::emitCallInTailPosition(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
    33553349{
    3356     if (m_inTailPosition) {
    3357         m_codeBlock->setHasTailCalls();
    3358         return emitCall(op_tail_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
    3359     }
    3360     return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
     3350    return emitCall(m_inTailPosition ? op_tail_call : op_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
    33613351}
    33623352
  • trunk/Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp

    r223729 r223989  
    225225    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis);
    226226    Node* getArgumentCount();
    227     bool handleRecursiveTailCall(Node* callTargetNode, const CallLinkStatus&, int registerOffset, VirtualRegister thisArgument, int argumentCountIncludingThis);
    228227    unsigned inliningCost(CallVariant, int argumentCountIncludingThis, InlineCallFrame::Kind); // Return UINT_MAX if it's not an inlining candidate. By convention, intrinsics have a cost of 1.
    229228    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
     
    233232    template<typename ChecksFunctor>
    234233    void inlineCall(Node* callTargetNode, int resultOperand, CallVariant, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, InlineCallFrame::Kind, BasicBlock* continuationBlock, const ChecksFunctor& insertChecks);
     234    void cancelLinkingForBlock(InlineStackEntry*, BasicBlock*); // Only works when the given block is the last one to have been added for that inline stack entry.
    235235    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    236236    template<typename ChecksFunctor>
     
    11281128        Vector<BasicBlock*> m_blockLinkingTargets;
    11291129
     1130        // This is set by op_enter in parseBlock(), and indicates the first block of the function.
     1131        BasicBlock* m_entryBlock;
     1132
    11301133        // Optional: a continuation block for returns to jump to. It is set by early returns if it does not exist.
    11311134        BasicBlock* m_continuationBlock;
     
    12151218    Ref<BasicBlock> block = adoptRef(*new BasicBlock(bytecodeIndex, m_numArguments, m_numLocals, 1));
    12161219    BasicBlock* blockPtr = block.ptr();
    1217     // m_blockLinkingTargets must always be sorted in increasing order of bytecodeBegin
    1218     if (m_inlineStackTop->m_blockLinkingTargets.size())
    1219         ASSERT(m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin < bytecodeIndex);
    12201220    m_inlineStackTop->m_blockLinkingTargets.append(blockPtr);
    12211221    m_graph.appendBlock(WTFMove(block));
     
    12351235    ASSERT(block->bytecodeBegin == UINT_MAX);
    12361236    block->bytecodeBegin = bytecodeIndex;
    1237     // m_blockLinkingTargets must always be sorted in increasing order of bytecodeBegin
    1238     if (m_inlineStackTop->m_blockLinkingTargets.size())
    1239         ASSERT(m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin < bytecodeIndex);
    12401237    m_inlineStackTop->m_blockLinkingTargets.append(block);
    12411238}
     
    12981295    if (callLinkStatus.canOptimize()) {
    12991296        VirtualRegister thisArgument = virtualRegisterForArgument(0, registerOffset);
    1300 
    1301         if (op == TailCall && handleRecursiveTailCall(callTarget, callLinkStatus, registerOffset, thisArgument, argumentCountIncludingThis))
    1302             return Terminal;
    13031297
    13041298        // Inlining is quite complex, and managed by a pipeline of functions:
     
    14171411    for (int i = 0; i < argumentCountIncludingThis; ++i)
    14181412        addToGraph(Phantom, get(virtualRegisterForArgument(i, registerOffset)));
    1419 }
    1420 
    1421 bool ByteCodeParser::handleRecursiveTailCall(Node* callTargetNode, const CallLinkStatus& callLinkStatus, int registerOffset, VirtualRegister thisArgument, int argumentCountIncludingThis)
    1422 {
    1423     // FIXME: We currently only do this optimisation in the simple, non-polymorphic case.
    1424     // https://bugs.webkit.org/show_bug.cgi?id=178390
    1425     if (callLinkStatus.couldTakeSlowPath() || callLinkStatus.size() != 1)
    1426         return false;
    1427 
    1428     auto targetExecutable = callLinkStatus[0].executable();
    1429     InlineStackEntry* stackEntry = m_inlineStackTop;
    1430     do {
    1431         if (targetExecutable != stackEntry->executable())
    1432             continue;
    1433         VERBOSE_LOG("   We found a recursive tail call, trying to optimize it into a jump.\n");
    1434 
    1435         if (auto* callFrame = stackEntry->m_inlineCallFrame) {
    1436             // Some code may statically use the argument count from the InlineCallFrame, so it would be invalid to loop back if it does not match.
    1437             // We "continue" instead of returning false in case another stack entry further on the stack has the right number of arguments.
    1438             if (argumentCountIncludingThis != static_cast<int>(callFrame->argumentCountIncludingThis))
    1439                 continue;
    1440         } else {
    1441             // We are in the machine code entry (i.e. the original caller).
    1442             // If we have more arguments than the number of parameters to the function, it is not clear where we could put them on the stack.
    1443             if (argumentCountIncludingThis > m_codeBlock->numParameters())
    1444                 return false;
    1445         }
    1446 
    1447         // We must add some check that the profiling information was correct and the target of this call is what we thought
    1448         emitFunctionChecks(callLinkStatus[0], callTargetNode, thisArgument);
    1449         // We flush everything, as if we were in the backedge of a loop (see treatment of op_jmp in parseBlock).
    1450         flushForTerminal();
    1451 
    1452         // We must set the arguments to the right values
    1453         int argIndex = 0;
    1454         for (; argIndex < argumentCountIncludingThis; ++argIndex) {
    1455             Node* value = get(virtualRegisterForArgument(argIndex, registerOffset));
    1456             setDirect(stackEntry->remapOperand(virtualRegisterForArgument(argIndex)), value, NormalSet);
    1457         }
    1458         Node* undefined = addToGraph(JSConstant, OpInfo(m_constantUndefined));
    1459         for (; argIndex < stackEntry->m_codeBlock->numParameters(); ++argIndex)
    1460             setDirect(stackEntry->remapOperand(virtualRegisterForArgument(argIndex)), undefined, NormalSet);
    1461 
    1462         // We must repeat the work of op_enter here as we will jump right after it.
    1463         // We jump right after it and not before it, because of some invariant saying that a CFG root cannot have predecessors in the IR.
    1464         for (int i = 0; i < stackEntry->m_codeBlock->m_numVars; ++i)
    1465             setDirect(stackEntry->remapOperand(virtualRegisterForLocal(i)), undefined, NormalSet);
    1466 
    1467         // We want to emit the SetLocals with an exit origin that points to the place we are jumping to.
    1468         unsigned oldIndex = m_currentIndex;
    1469         auto oldStackTop = m_inlineStackTop;
    1470         m_inlineStackTop = stackEntry;
    1471         m_currentIndex = OPCODE_LENGTH(op_enter);
    1472         m_exitOK = true;
    1473         processSetLocalQueue();
    1474         m_currentIndex = oldIndex;
    1475         m_inlineStackTop = oldStackTop;
    1476         m_exitOK = false;
    1477 
    1478         BasicBlock** entryBlockPtr = tryBinarySearch<BasicBlock*, unsigned>(stackEntry->m_blockLinkingTargets, stackEntry->m_blockLinkingTargets.size(), OPCODE_LENGTH(op_enter), getBytecodeBeginForBlock);
    1479         RELEASE_ASSERT(entryBlockPtr);
    1480         addJumpTo(*entryBlockPtr);
    1481         return true;
    1482         // It would be unsound to jump over a non-tail call: the "tail" call is not really a tail call in that case.
    1483     } while (stackEntry->m_inlineCallFrame && stackEntry->m_inlineCallFrame->kind == InlineCallFrame::TailCall && (stackEntry = stackEntry->m_caller));
    1484 
    1485     // The tail call was not recursive
    1486     return false;
    14871413}
    14881414
     
    42894215                set(virtualRegisterForLocal(i), undefined, ImmediateNakedSet);
    42904216
     4217            m_inlineStackTop->m_entryBlock = m_currentBlock;
     4218
    42914219            NEXT_OPCODE(op_enter);
    42924220        }
     
    54755403                NEXT_OPCODE(op_tail_call);
    54765404            else
    5477                 LAST_OPCODE_LINKED(op_tail_call);
    5478             // We use LAST_OPCODE_LINKED instead of LAST_OPCODE because if the tail call was optimized, it may now be a jump to a bytecode index in a different InlineStackEntry.
     5405                LAST_OPCODE(op_tail_call);
    54795406        }
    54805407
     
    65046431
    65056432    m_graph.determineReachability();
     6433    if (Options::verboseDFGBytecodeParsing())
     6434        m_graph.dump();
    65066435    m_graph.killUnreachableBlocks();
    65076436
Note: See TracChangeset for help on using the changeset viewer.