Changeset 229364 in WebKit


Timestamp: Mar 7, 2018 10:04:30 AM
Author: mark.lam@apple.com
Message:

Add support for ARM64E.
https://bugs.webkit.org/show_bug.cgi?id=183398
<rdar://problem/38212621>

Reviewed by Michael Saboff.

  • assembler/MacroAssembler.h:
  • llint/LLIntOfflineAsmConfig.h:
  • llint/LowLevelInterpreter.asm:
  • llint/LowLevelInterpreter64.asm:
  • offlineasm/backends.rb:
Location: trunk/Source/JavaScriptCore
Files: 6 edited

  • trunk/Source/JavaScriptCore/ChangeLog

    r229363 → r229364

+2018-03-07  Mark Lam  <mark.lam@apple.com>
+
+        Add support for ARM64E.
+        https://bugs.webkit.org/show_bug.cgi?id=183398
+        <rdar://problem/38212621>
+
+        Reviewed by Michael Saboff.
+
+        * assembler/MacroAssembler.h:
+        * llint/LLIntOfflineAsmConfig.h:
+        * llint/LowLevelInterpreter.asm:
+        * llint/LowLevelInterpreter64.asm:
+        * offlineasm/backends.rb:
+
 2018-03-07  Yusuke Suzuki  <utatane.tea@gmail.com>
  • trunk/Source/JavaScriptCore/assembler/MacroAssembler.h

    r229087 → r229364

 #include "MacroAssemblerARMv7.h"
 namespace JSC { typedef MacroAssemblerARMv7 MacroAssemblerBase; };
+
+#elif CPU(ARM64E) && __has_include(<WebKitAdditions/MacroAssemblerARM64E.h>)
+#define TARGET_ASSEMBLER ARM64EAssembler
+#define TARGET_MACROASSEMBLER MacroAssemblerARM64E
+#include <WebKitAdditions/MacroAssemblerARM64E.h>

 #elif CPU(ARM64)
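
The new branch only takes effect when the build targets ARM64E and the private WebKitAdditions header is actually present; otherwise the chain falls through to the existing CPU(ARM64) case. A minimal sketch of that selection pattern (not the WebKit sources themselves; __arm64e__ stands in for WebKit's CPU(ARM64E) test and SelectedBackend is an illustrative alias):

    // Sketch only: pick the pointer-authenticating backend when both the target
    // and the optional header are available, otherwise fall back to plain ARM64.
    #if defined(__arm64e__) && __has_include(<WebKitAdditions/MacroAssemblerARM64E.h>)
    #include <WebKitAdditions/MacroAssemblerARM64E.h>
    using SelectedBackend = MacroAssemblerARM64E;   // ARM64E backend from WebKitAdditions
    #elif defined(__aarch64__)
    #include "MacroAssemblerARM64.h"
    using SelectedBackend = MacroAssemblerARM64;    // generic ARM64 backend
    #endif
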
  • trunk/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h

    r229354 → r229364

 #if CPU(ARM64E)
 #define OFFLINE_ASM_ARM64E 1
+#undef OFFLINE_ASM_ARM64
+#define OFFLINE_ASM_ARM64 0 // Pretend that ARM64 and ARM64E are mutually exclusive to please the offlineasm.
 #else
 #define OFFLINE_ASM_ARM64E 0
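
Why both of the new lines are needed: on an arm64e target the plain CPU(ARM64) test still holds (the in-line comment calls this "pretending" the two are mutually exclusive), so without the override the generated configuration would hand the offlineasm two active backends for a single build. A standalone sketch of the resulting flag logic, with CPU_ARM64/CPU_ARM64E as stand-ins for WebKit's CPU() macros:

    /* Standalone sketch, not the real header: both CPU tests hold on arm64e,
       so the ARM64E block has to force the plain-ARM64 flag back to 0. */
    #define CPU_ARM64  1
    #define CPU_ARM64E 1

    #if CPU_ARM64
    #define OFFLINE_ASM_ARM64 1        /* set by the pre-existing ARM64 block */
    #endif

    #if CPU_ARM64E
    #define OFFLINE_ASM_ARM64E 1
    #undef OFFLINE_ASM_ARM64
    #define OFFLINE_ASM_ARM64 0        /* keep the two backends mutually exclusive */
    #else
    #define OFFLINE_ASM_ARM64E 0
    #endif

    /* Net result seen by the offlineasm: OFFLINE_ASM_ARM64E == 1, OFFLINE_ASM_ARM64 == 0. */
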
  • trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm

    r229354 → r229364

 #  registers on all architectures.
 #
-#  - lr is defined on non-X86 architectures (ARM64, ARMv7, ARM,
+#  - lr is defined on non-X86 architectures (ARM64, ARM64E, ARMv7, ARM,
 #  ARMv7_TRADITIONAL, MIPS and CLOOP) and holds the return PC
 #

 const CallOpCodeSize = 9

-if X86_64 or ARM64 or C_LOOP
+if X86_64 or ARM64 or ARM64E or C_LOOP
     const maxFrameExtentForSlowPathCall = 0
 elsif ARM or ARMv7_TRADITIONAL or ARMv7

 end

-if X86_64 or X86_64_WIN or ARM64
+if X86_64 or X86_64_WIN or ARM64 or ARM64E
     const CalleeSaveSpaceAsVirtualRegisters = 3
 else

     #   This requires an add before the call, and a sub after.
     const PC = t4 # When changing this, make sure LLIntPC is up to date in LLIntPCRanges.h
-    if ARM64
+    if ARM64 or ARM64E
         const PB = csr7
         const tagTypeNumber = csr8

 #     )
 #
-if X86_64 or ARM64
+if X86_64 or ARM64 or ARM64E
     macro probe(action)
         # save all the registers that the LLInt may use.
-        if ARM64
+        if ARM64 or ARM64E
             push cfr, lr
         end

         push t2, t3
         push t4, t5
-        if ARM64
+        if ARM64 or ARM64E
             push csr0, csr1
             push csr2, csr3

         # restore all the registers we saved previously.
-        if ARM64
+        if ARM64 or ARM64E
             pop csr9, csr8
             pop csr7, csr6

         pop a3, a2
         pop a1, a0
-        if ARM64
+        if ARM64 or ARM64E
             pop lr, cfr
         end

 macro checkStackPointerAlignment(tempReg, location)
-    if ARM64 or C_LOOP
-        # ARM64 will check for us!
+    if ARM64 or ARM64E or C_LOOP
+        # ARM64 and ARM64E will check for us!
         # C_LOOP does not need the alignment, and can use a little perf
         # improvement from avoiding useless work.

 end

-if C_LOOP or ARM64 or X86_64 or X86_64_WIN
+if C_LOOP or ARM64 or ARM64E or X86_64 or X86_64_WIN
     const CalleeSaveRegisterCount = 0
 elsif ARM or ARMv7_TRADITIONAL or ARMv7

 macro pushCalleeSaves()
-    if C_LOOP or ARM64 or X86_64 or X86_64_WIN
+    if C_LOOP or ARM64 or ARM64E or X86_64 or X86_64_WIN
     elsif ARM or ARMv7_TRADITIONAL
         emit "push {r4-r10}"

 macro popCalleeSaves()
-    if C_LOOP or ARM64 or X86_64 or X86_64_WIN
+    if C_LOOP or ARM64 or ARM64E or X86_64 or X86_64_WIN
     elsif ARM or ARMv7_TRADITIONAL
         emit "pop {r4-r10}"

     elsif X86 or X86_WIN or X86_64 or X86_64_WIN
         push cfr
-    elsif ARM64
+    elsif ARM64 or ARM64E
         push cfr, lr
     else

     elsif X86 or X86_WIN or X86_64 or X86_64_WIN
         pop cfr
-    elsif ARM64
+    elsif ARM64 or ARM64E
         pop lr, cfr
     end

     elsif ARM or ARMv7_TRADITIONAL
     elsif ARMv7
-    elsif ARM64
+    elsif ARM64 or ARM64E
         emit "stp x27, x28, [x29, #-16]"
         emit "stp xzr, x26, [x29, #-32]"

     elsif ARM or ARMv7_TRADITIONAL
     elsif ARMv7
-    elsif ARM64
+    elsif ARM64 or ARM64E
         emit "ldp xzr, x26, [x29, #-32]"
         emit "ldp x27, x28, [x29, #-16]"

 macro copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm, temp)
-    if ARM64 or X86_64 or X86_64_WIN
+    if ARM64 or ARM64E or X86_64 or X86_64_WIN
         loadp VM::topEntryFrame[vm], temp
         vmEntryRecord(temp, temp)
         leap VMEntryRecord::calleeSaveRegistersBuffer[temp], temp
-        if ARM64
+        if ARM64 or ARM64E
            storep csr0, [temp]
            storep csr1, 8[temp]

 macro restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(vm, temp)
-    if ARM64 or X86_64 or X86_64_WIN
+    if ARM64 or ARM64E or X86_64 or X86_64_WIN
         loadp VM::topEntryFrame[vm], temp
         vmEntryRecord(temp, temp)
         leap VMEntryRecord::calleeSaveRegistersBuffer[temp], temp
-        if ARM64
+        if ARM64 or ARM64E
            loadp [temp], csr0
            loadp 8[temp], csr1

 macro preserveReturnAddressAfterCall(destinationRegister)
-    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS
+    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or ARM64E or MIPS
         # In C_LOOP case, we're only preserving the bytecode vPC.
         move lr, destinationRegister

     if X86 or X86_WIN or X86_64 or X86_64_WIN
         push cfr
-    elsif ARM64
+    elsif ARM64 or ARM64E
         push cfr, lr
     elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS

     if X86 or X86_WIN or X86_64 or X86_64_WIN
         pop cfr
-    elsif ARM64
+    elsif ARM64 or ARM64E
         pop lr, cfr
     elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS

     andi ~StackAlignmentMask, temp2

-    if ARM or ARMv7_TRADITIONAL or ARMv7 or ARM64 or C_LOOP or MIPS
+    if ARM or ARMv7_TRADITIONAL or ARMv7 or ARM64 or ARM64E or C_LOOP or MIPS
         addp 2 * PtrSize, sp
         subi 2 * PtrSize, temp2

         move cfr, sp # restore the previous sp
         # pop the callerFrame since we will jump to a function that wants to save it
-        if ARM64
+        if ARM64 or ARM64E
            pop lr, cfr
         elsif ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS

         _relativePCBase:
            pop pcBase
-        elsif ARM64
+        elsif ARM64 or ARM64E
         elsif ARMv7
         _relativePCBase:

         move index, t4
         storep t3, [a0, t4, 4]
-    elsif ARM64
+    elsif ARM64 or ARM64E
         pcrtoaddr label, t1
         move index, t4
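
Every hunk in this file is the same mechanical widening: a condition that used to test ARM64 now tests ARM64 or ARM64E. The widening is necessary because, per the config change above, ARM64 is forced to 0 on arm64e builds, so an unwidened test would silently skip the 64-bit ARM code paths. A tiny C++ constant-folding sketch of how the conditions resolve (illustrative only; the offlineasm evaluates these flags at build time):

    // Flags as an arm64e build sees them, per LLIntOfflineAsmConfig.h above.
    constexpr bool ARM64  = false;  // forced off on arm64e
    constexpr bool ARM64E = true;
    constexpr bool X86_64 = false;
    constexpr bool C_LOOP = false;

    // Old condition: no longer selects the ARM64-style path on arm64e.
    static_assert(!(X86_64 || ARM64 || C_LOOP), "old test misses arm64e");
    // Widened condition: arm64e keeps taking the same paths as plain ARM64.
    static_assert(X86_64 || ARM64 || ARM64E || C_LOOP, "widened test still selects it");
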
  • trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm

    r229354 → r229364

 macro cCall2(function)
     checkStackPointerAlignment(t4, 0xbad0c002)
-    if X86_64 or ARM64
+    if X86_64 or ARM64 or ARM64E
         call function
     elsif X86_64_WIN

 macro cCall4(function)
     checkStackPointerAlignment(t4, 0xbad0c004)
-    if X86_64 or ARM64
+    if X86_64 or ARM64 or ARM64E
         call function
     elsif X86_64_WIN

 .copyArgsDone:
-    if ARM64
+    if ARM64 or ARM64E
         move sp, t4
         storep t4, VM::topCallFrame[vm]

 .noExtraSlot:
     if POINTER_PROFILING
-        if ARM64
+        if ARM64 or ARM64E
            loadp 8[cfr], lr
         end

         tagReturnAddress t1

-        if ARM64
+        if ARM64 or ARM64E
            storep lr, 8[cfr]
         end

     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
     storep cfr, VM::topCallFrame[t1]
-    if ARM64 or C_LOOP
+    if ARM64 or ARM64E or C_LOOP
         storep lr, ReturnPC[cfr]
     end

     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
     storep cfr, VM::topCallFrame[t1]
-    if ARM64 or C_LOOP
+    if ARM64 or ARM64E or C_LOOP
         storep lr, ReturnPC[cfr]
     end
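
The hunks around tagReturnAddress and the explicit lr load/store under POINTER_PROFILING reflect the main way ARM64E differs from plain ARM64 on Apple platforms: return addresses are signed with pointer authentication, so the interpreter materializes and re-tags lr explicitly instead of treating the saved slot as a raw pointer. The mock below only illustrates the sign-then-authenticate round trip; it is not WebKit's implementation and uses an XOR stand-in for the real PAC instructions:

    #include <cstdint>

    // Conceptual mock of return-address signing -- NOT WebKit's tagReturnAddress
    // and NOT the hardware PAC behaviour; an XOR stands in for the signing.
    namespace mock {

    constexpr uint64_t processKey = 0x5eedc0ffee15f00dULL; // stand-in for the per-process key

    // Sign a return PC against the stack slot it will live in (the discriminator).
    inline uint64_t signReturnAddress(uint64_t returnPC, uint64_t frameSlot)
    {
        return returnPC ^ (processKey + frameSlot);
    }

    // Authenticate before use: a wrong slot or a corrupted value yields garbage
    // rather than a usable return address.
    inline uint64_t authReturnAddress(uint64_t signedPC, uint64_t frameSlot)
    {
        return signedPC ^ (processKey + frameSlot);
    }

    } // namespace mock
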
  • trunk/Source/JavaScriptCore/offlineasm/backends.rb

    r228402 → r229364

 require "cloop"

+begin
+    require "arm64e"
+rescue LoadError
+end
+
 BACKENDS =
     [

      "ARMv7_TRADITIONAL",
      "ARM64",
+     "ARM64E",
      "MIPS",
      "C_LOOP"

      "ARMv7_TRADITIONAL",
      "ARM64",
+     "ARM64E",
      "MIPS",
      "C_LOOP"