Changeset 229364 in webkit
- Timestamp:
- Mar 7, 2018 10:04:30 AM (6 years ago)
- Location:
- trunk/Source/JavaScriptCore
- Files:
- 6 edited
Legend:
- Unmodified
- Added
- Removed
trunk/Source/JavaScriptCore/ChangeLog
r229363 r229364 1 2018-03-07 Mark Lam <mark.lam@apple.com> 2 3 Add support for ARM64E. 4 https://bugs.webkit.org/show_bug.cgi?id=183398 5 <rdar://problem/38212621> 6 7 Reviewed by Michael Saboff. 8 9 * assembler/MacroAssembler.h: 10 * llint/LLIntOfflineAsmConfig.h: 11 * llint/LowLevelInterpreter.asm: 12 * llint/LowLevelInterpreter64.asm: 13 * offlineasm/backends.rb: 14 1 15 2018-03-07 Yusuke Suzuki <utatane.tea@gmail.com> 2 16 -
trunk/Source/JavaScriptCore/assembler/MacroAssembler.h
r229087 r229364 35 35 #include "MacroAssemblerARMv7.h" 36 36 namespace JSC { typedef MacroAssemblerARMv7 MacroAssemblerBase; }; 37 38 #elif CPU(ARM64E) && __has_include(<WebKitAdditions/MacroAssemblerARM64E.h>) 39 #define TARGET_ASSEMBLER ARM64EAssembler 40 #define TARGET_MACROASSEMBLER MacroAssemblerARM64E 41 #include <WebKitAdditions/MacroAssemblerARM64E.h> 37 42 38 43 #elif CPU(ARM64) -
trunk/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h
r229354 r229364 119 119 #if CPU(ARM64E) 120 120 #define OFFLINE_ASM_ARM64E 1 121 #undef OFFLINE_ASM_ARM64 122 #define OFFLINE_ASM_ARM64 0 // Pretend that ARM64 and ARM64E are mutually exclusive to please the offlineasm. 121 123 #else 122 124 #define OFFLINE_ASM_ARM64E 0 -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
r229354 r229364 72 72 # registers on all architectures. 73 73 # 74 # - lr is defined on non-X86 architectures (ARM64, ARM v7, ARM,74 # - lr is defined on non-X86 architectures (ARM64, ARM64E, ARMv7, ARM, 75 75 # ARMv7_TRADITIONAL, MIPS and CLOOP) and holds the return PC 76 76 # … … 224 224 const CallOpCodeSize = 9 225 225 226 if X86_64 or ARM64 or C_LOOP226 if X86_64 or ARM64 or ARM64E or C_LOOP 227 227 const maxFrameExtentForSlowPathCall = 0 228 228 elsif ARM or ARMv7_TRADITIONAL or ARMv7 … … 236 236 end 237 237 238 if X86_64 or X86_64_WIN or ARM64 238 if X86_64 or X86_64_WIN or ARM64 or ARM64E 239 239 const CalleeSaveSpaceAsVirtualRegisters = 3 240 240 else … … 279 279 # This requires an add before the call, and a sub after. 280 280 const PC = t4 # When changing this, make sure LLIntPC is up to date in LLIntPCRanges.h 281 if ARM64 281 if ARM64 or ARM64E 282 282 const PB = csr7 283 283 const tagTypeNumber = csr8 … … 503 503 # ) 504 504 # 505 if X86_64 or ARM64 505 if X86_64 or ARM64 or ARM64E 506 506 macro probe(action) 507 507 # save all the registers that the LLInt may use. 508 if ARM64 508 if ARM64 or ARM64E 509 509 push cfr, lr 510 510 end … … 514 514 push t2, t3 515 515 push t4, t5 516 if ARM64 516 if ARM64 or ARM64E 517 517 push csr0, csr1 518 518 push csr2, csr3 … … 525 525 526 526 # restore all the registers we saved previously. 527 if ARM64 527 if ARM64 or ARM64E 528 528 pop csr9, csr8 529 529 pop csr7, csr6 … … 537 537 pop a3, a2 538 538 pop a1, a0 539 if ARM64 539 if ARM64 or ARM64E 540 540 pop lr, cfr 541 541 end … … 547 547 548 548 macro checkStackPointerAlignment(tempReg, location) 549 if ARM64 or C_LOOP550 # ARM64 will check for us!549 if ARM64 or ARM64E or C_LOOP 550 # ARM64 and ARM64E will check for us! 551 551 # C_LOOP does not need the alignment, and can use a little perf 552 552 # improvement from avoiding useless work. 
… … 566 566 end 567 567 568 if C_LOOP or ARM64 or X86_64 or X86_64_WIN568 if C_LOOP or ARM64 or ARM64E or X86_64 or X86_64_WIN 569 569 const CalleeSaveRegisterCount = 0 570 570 elsif ARM or ARMv7_TRADITIONAL or ARMv7 … … 583 583 584 584 macro pushCalleeSaves() 585 if C_LOOP or ARM64 or X86_64 or X86_64_WIN585 if C_LOOP or ARM64 or ARM64E or X86_64 or X86_64_WIN 586 586 elsif ARM or ARMv7_TRADITIONAL 587 587 emit "push {r4-r10}" … … 605 605 606 606 macro popCalleeSaves() 607 if C_LOOP or ARM64 or X86_64 or X86_64_WIN607 if C_LOOP or ARM64 or ARM64E or X86_64 or X86_64_WIN 608 608 elsif ARM or ARMv7_TRADITIONAL 609 609 emit "pop {r4-r10}" … … 630 630 elsif X86 or X86_WIN or X86_64 or X86_64_WIN 631 631 push cfr 632 elsif ARM64 632 elsif ARM64 or ARM64E 633 633 push cfr, lr 634 634 else … … 645 645 elsif X86 or X86_WIN or X86_64 or X86_64_WIN 646 646 pop cfr 647 elsif ARM64 647 elsif ARM64 or ARM64E 648 648 pop lr, cfr 649 649 end … … 655 655 elsif ARM or ARMv7_TRADITIONAL 656 656 elsif ARMv7 657 elsif ARM64 657 elsif ARM64 or ARM64E 658 658 emit "stp x27, x28, [x29, #-16]" 659 659 emit "stp xzr, x26, [x29, #-32]" … … 676 676 elsif ARM or ARMv7_TRADITIONAL 677 677 elsif ARMv7 678 elsif ARM64 678 elsif ARM64 or ARM64E 679 679 emit "ldp xzr, x26, [x29, #-32]" 680 680 emit "ldp x27, x28, [x29, #-16]" … … 694 694 695 695 macro copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm, temp) 696 if ARM64 or X86_64 or X86_64_WIN696 if ARM64 or ARM64E or X86_64 or X86_64_WIN 697 697 loadp VM::topEntryFrame[vm], temp 698 698 vmEntryRecord(temp, temp) 699 699 leap VMEntryRecord::calleeSaveRegistersBuffer[temp], temp 700 if ARM64 700 if ARM64 or ARM64E 701 701 storep csr0, [temp] 702 702 storep csr1, 8[temp] … … 736 736 737 737 macro restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(vm, temp) 738 if ARM64 or X86_64 or X86_64_WIN738 if ARM64 or ARM64E or X86_64 or X86_64_WIN 739 739 loadp VM::topEntryFrame[vm], temp 740 740 vmEntryRecord(temp, temp) 741 741 leap 
VMEntryRecord::calleeSaveRegistersBuffer[temp], temp 742 if ARM64 742 if ARM64 or ARM64E 743 743 loadp [temp], csr0 744 744 loadp 8[temp], csr1 … … 778 778 779 779 macro preserveReturnAddressAfterCall(destinationRegister) 780 if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS780 if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or ARM64E or MIPS 781 781 # In C_LOOP case, we're only preserving the bytecode vPC. 782 782 move lr, destinationRegister … … 799 799 if X86 or X86_WIN or X86_64 or X86_64_WIN 800 800 push cfr 801 elsif ARM64 801 elsif ARM64 or ARM64E 802 802 push cfr, lr 803 803 elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS … … 811 811 if X86 or X86_WIN or X86_64 or X86_64_WIN 812 812 pop cfr 813 elsif ARM64 813 elsif ARM64 or ARM64E 814 814 pop lr, cfr 815 815 elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS … … 899 899 andi ~StackAlignmentMask, temp2 900 900 901 if ARM or ARMv7_TRADITIONAL or ARMv7 or ARM64 or C_LOOP or MIPS901 if ARM or ARMv7_TRADITIONAL or ARMv7 or ARM64 or ARM64E or C_LOOP or MIPS 902 902 addp 2 * PtrSize, sp 903 903 subi 2 * PtrSize, temp2 … … 1052 1052 move cfr, sp # restore the previous sp 1053 1053 # pop the callerFrame since we will jump to a function that wants to save it 1054 if ARM64 1054 if ARM64 or ARM64E 1055 1055 pop lr, cfr 1056 1056 elsif ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS … … 1228 1228 _relativePCBase: 1229 1229 pop pcBase 1230 elsif ARM64 1230 elsif ARM64 or ARM64E 1231 1231 elsif ARMv7 1232 1232 _relativePCBase: … … 1255 1255 move index, t4 1256 1256 storep t3, [a0, t4, 4] 1257 elsif ARM64 1257 elsif ARM64 or ARM64E 1258 1258 pcrtoaddr label, t1 1259 1259 move index, t4 -
trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
r229354 r229364 55 55 macro cCall2(function) 56 56 checkStackPointerAlignment(t4, 0xbad0c002) 57 if X86_64 or ARM64 57 if X86_64 or ARM64 or ARM64E 58 58 call function 59 59 elsif X86_64_WIN … … 102 102 macro cCall4(function) 103 103 checkStackPointerAlignment(t4, 0xbad0c004) 104 if X86_64 or ARM64 104 if X86_64 or ARM64 or ARM64E 105 105 call function 106 106 elsif X86_64_WIN … … 216 216 217 217 .copyArgsDone: 218 if ARM64 218 if ARM64 or ARM64E 219 219 move sp, t4 220 220 storep t4, VM::topCallFrame[vm] … … 554 554 .noExtraSlot: 555 555 if POINTER_PROFILING 556 if ARM64 556 if ARM64 or ARM64E 557 557 loadp 8[cfr], lr 558 558 end … … 589 589 tagReturnAddress t1 590 590 591 if ARM64 591 if ARM64 or ARM64E 592 592 storep lr, 8[cfr] 593 593 end … … 2136 2136 loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1 2137 2137 storep cfr, VM::topCallFrame[t1] 2138 if ARM64 or C_LOOP2138 if ARM64 or ARM64E or C_LOOP 2139 2139 storep lr, ReturnPC[cfr] 2140 2140 end … … 2181 2181 loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1 2182 2182 storep cfr, VM::topCallFrame[t1] 2183 if ARM64 or C_LOOP2183 if ARM64 or ARM64E or C_LOOP 2184 2184 storep lr, ReturnPC[cfr] 2185 2185 end -
trunk/Source/JavaScriptCore/offlineasm/backends.rb
r228402 r229364 30 30 require "cloop" 31 31 32 begin 33 require "arm64e" 34 rescue LoadError 35 end 36 32 37 BACKENDS = 33 38 [ … … 40 45 "ARMv7_TRADITIONAL", 41 46 "ARM64", 47 "ARM64E", 42 48 "MIPS", 43 49 "C_LOOP" … … 59 65 "ARMv7_TRADITIONAL", 60 66 "ARM64", 67 "ARM64E", 61 68 "MIPS", 62 69 "C_LOOP"
Note: See TracChangeset for help on using the changeset viewer.