Changeset 249449 in webkit
- Timestamp: Sep 3, 2019, 6:47:02 PM (6 years ago)
- Location: trunk/Source/JavaScriptCore
- Files: 11 edited
trunk/Source/JavaScriptCore/ChangeLog
r249445 → r249449

+2019-09-03  Mark Lam  <mark.lam@apple.com>
+
+        Remove the need to pass performJITMemcpy as a pointer.
+        https://bugs.webkit.org/show_bug.cgi?id=201413
+
+        Reviewed by Michael Saboff.
+
+        We want performJITMemcpy to always be inlined. In this patch, we also clean up
+        some template parameters to use enums instead of booleans to better document the
+        intent of the code.
+
+        * assembler/ARM64Assembler.h:
+        (JSC::ARM64Assembler::fillNops):
+        (JSC::ARM64Assembler::linkJump):
+        (JSC::ARM64Assembler::linkCall):
+        (JSC::ARM64Assembler::relinkJump):
+        (JSC::ARM64Assembler::relinkCall):
+        (JSC::ARM64Assembler::link):
+        (JSC::ARM64Assembler::linkJumpOrCall):
+        (JSC::ARM64Assembler::linkCompareAndBranch):
+        (JSC::ARM64Assembler::linkConditionalBranch):
+        (JSC::ARM64Assembler::linkTestAndBranch):
+        (JSC::ARM64Assembler::relinkJumpOrCall):
+        (JSC::ARM64Assembler::CopyFunction::CopyFunction): Deleted.
+        (JSC::ARM64Assembler::CopyFunction::operator()): Deleted.
+        * assembler/ARMv7Assembler.h:
+        (JSC::ARMv7Assembler::fillNops):
+        (JSC::ARMv7Assembler::link):
+        (JSC::ARMv7Assembler::linkJumpT1):
+        (JSC::ARMv7Assembler::linkJumpT2):
+        (JSC::ARMv7Assembler::linkJumpT3):
+        (JSC::ARMv7Assembler::linkJumpT4):
+        (JSC::ARMv7Assembler::linkConditionalJumpT4):
+        (JSC::ARMv7Assembler::linkBX):
+        (JSC::ARMv7Assembler::linkConditionalBX):
+        * assembler/AbstractMacroAssembler.h:
+        (JSC::AbstractMacroAssembler::emitNops):
+        * assembler/LinkBuffer.cpp:
+        (JSC::LinkBuffer::copyCompactAndLinkCode):
+        * assembler/MIPSAssembler.h:
+        (JSC::MIPSAssembler::fillNops):
+        * assembler/MacroAssemblerARM64.h:
+        (JSC::MacroAssemblerARM64::link):
+        * assembler/MacroAssemblerARMv7.h:
+        (JSC::MacroAssemblerARMv7::link):
+        * assembler/X86Assembler.h:
+        (JSC::X86Assembler::fillNops):
+        * jit/ExecutableAllocator.h:
+        (JSC::performJITMemcpy):
+        * runtime/JSCPtrTag.h:
+
 2019-09-03  Devin Rousso  <drousso@apple.com>
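The shape of the change is mechanical: the copy routine used to arrive as a runtime argument (a function pointer), and now arrives as a non-type template parameter, so each instantiation is bound to one copier and the compiler can inline it. The sketch below illustrates that transformation only; jitMemcpy, fillNopsByPointer and fillNopsTemplated are hypothetical names, not JSC code.

    #include <cstdint>
    #include <cstring>
    #include <cstddef>

    // Hypothetical stand-in for JSC's performJITMemcpy (the real one routes
    // writes through the JIT memory machinery).
    void* jitMemcpy(void* dst, const void* src, size_t n) { return std::memcpy(dst, src, n); }

    // Before: the copier is a runtime value, so the compiler generally emits
    // an indirect call instead of inlining it.
    static void fillNopsByPointer(void* base, size_t size, void* (*copy)(void*, const void*, size_t))
    {
        const uint32_t nop = 0xd503201f; // AArch64 NOP encoding
        for (size_t i = 0; i + sizeof(nop) <= size; i += sizeof(nop))
            copy(static_cast<char*>(base) + i, &nop, sizeof(nop));
    }

    // After: the copier is a non-type template parameter (a function
    // reference), so the call can be inlined into each instantiation.
    using CopyFunction = void* (&)(void*, const void*, size_t);

    template<CopyFunction copy>
    static void fillNopsTemplated(void* base, size_t size)
    {
        const uint32_t nop = 0xd503201f;
        for (size_t i = 0; i + sizeof(nop) <= size; i += sizeof(nop))
            copy(static_cast<char*>(base) + i, &nop, sizeof(nop));
    }

    int main()
    {
        uint32_t buffer[4] = {};
        fillNopsByPointer(buffer, sizeof(buffer), jitMemcpy);
        fillNopsTemplated<jitMemcpy>(buffer, sizeof(buffer));
        return buffer[0] == 0xd503201f ? 0 : 1;
    }

Both versions write the same bytes; the difference is only whether the copier is known at compile time, which is what allows the always-inline goal stated above.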
trunk/Source/JavaScriptCore/assembler/ARM64Assembler.h
r247799 → r249449

     }

-    template <typename CopyFunction>
-    static void fillNops(void* base, size_t size, CopyFunction copy)
+    enum BranchTargetType { DirectBranch, IndirectBranch };
+    using CopyFunction = void*(&)(void*, const void*, size_t);
+
+    template <CopyFunction copy>
+    static void fillNops(void* base, size_t size)
     {
         RELEASE_ASSERT(!(size % sizeof(int32_t)));
…
     {
         ASSERT(from.isSet());
-        relinkJumpOrCall<false>(addressOf(code, from), addressOf(code, from), to);
+        relinkJumpOrCall<BranchType_JMP>(addressOf(code, from), addressOf(code, from), to);
     }
…
     {
         ASSERT(from.isSet());
-        linkJumpOrCall<true>(addressOf(code, from) - 1, addressOf(code, from) - 1, to);
+        linkJumpOrCall<BranchType_CALL>(addressOf(code, from) - 1, addressOf(code, from) - 1, to);
     }
…
     static void relinkJump(void* from, void* to)
     {
-        relinkJumpOrCall<false>(reinterpret_cast<int*>(from), reinterpret_cast<const int*>(from), to);
+        relinkJumpOrCall<BranchType_JMP>(reinterpret_cast<int*>(from), reinterpret_cast<const int*>(from), to);
         cacheFlush(from, sizeof(int));
     }
…
     static void relinkCall(void* from, void* to)
     {
-        relinkJumpOrCall<true>(reinterpret_cast<int*>(from) - 1, reinterpret_cast<const int*>(from) - 1, to);
+        relinkJumpOrCall<BranchType_CALL>(reinterpret_cast<int*>(from) - 1, reinterpret_cast<const int*>(from) - 1, to);
         cacheFlush(reinterpret_cast<int*>(from) - 1, sizeof(int));
     }
…
     }

-#if CPU(ARM64E)
-    class CopyFunction {
-        typedef void* (*Func)(void*, const void*, size_t);
-    public:
-        CopyFunction(Func func)
-            : m_func(func)
-        {
-            assertIsNullOrTaggedWith(func, CopyFunctionPtrTag);
-        }
-
-        void* operator()(void* dst, const void* src, size_t size)
-        {
-            return ptrauth_auth_function(m_func, ptrauth_key_process_dependent_code, CopyFunctionPtrTag)(dst, src, size);
-        }
-
-    private:
-        Func m_func;
-    };
-#else
-    typedef void* (*CopyFunction)(void*, const void*, size_t);
-#endif
-
-    static void ALWAYS_INLINE link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction8, uint8_t* to, CopyFunction copy)
+    template<CopyFunction copy>
+    static void ALWAYS_INLINE link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction8, uint8_t* to)
     {
         const int* fromInstruction = reinterpret_cast<const int*>(fromInstruction8);
         switch (record.linkType()) {
         case LinkJumpNoCondition:
-            linkJumpOrCall<false>(reinterpret_cast<int*>(from), fromInstruction, to, copy);
+            linkJumpOrCall<BranchType_JMP, copy>(reinterpret_cast<int*>(from), fromInstruction, to);
             break;
         case LinkJumpConditionDirect:
-            linkConditionalBranch<true>(record.condition(), reinterpret_cast<int*>(from), fromInstruction, to, copy);
+            linkConditionalBranch<DirectBranch, copy>(record.condition(), reinterpret_cast<int*>(from), fromInstruction, to);
             break;
         case LinkJumpCondition:
-            linkConditionalBranch<false>(record.condition(), reinterpret_cast<int*>(from) - 1, fromInstruction - 1, to, copy);
+            linkConditionalBranch<IndirectBranch, copy>(record.condition(), reinterpret_cast<int*>(from) - 1, fromInstruction - 1, to);
             break;
         case LinkJumpCompareAndBranchDirect:
-            linkCompareAndBranch<true>(record.condition(), record.is64Bit(), record.compareRegister(), reinterpret_cast<int*>(from), fromInstruction, to, copy);
+            linkCompareAndBranch<DirectBranch, copy>(record.condition(), record.is64Bit(), record.compareRegister(), reinterpret_cast<int*>(from), fromInstruction, to);
             break;
         case LinkJumpCompareAndBranch:
-            linkCompareAndBranch<false>(record.condition(), record.is64Bit(), record.compareRegister(), reinterpret_cast<int*>(from) - 1, fromInstruction - 1, to, copy);
+            linkCompareAndBranch<IndirectBranch, copy>(record.condition(), record.is64Bit(), record.compareRegister(), reinterpret_cast<int*>(from) - 1, fromInstruction - 1, to);
             break;
         case LinkJumpTestBitDirect:
-            linkTestAndBranch<true>(record.condition(), record.bitNumber(), record.compareRegister(), reinterpret_cast<int*>(from), fromInstruction, to, copy);
+            linkTestAndBranch<DirectBranch, copy>(record.condition(), record.bitNumber(), record.compareRegister(), reinterpret_cast<int*>(from), fromInstruction, to);
             break;
         case LinkJumpTestBit:
-            linkTestAndBranch<false>(record.condition(), record.bitNumber(), record.compareRegister(), reinterpret_cast<int*>(from) - 1, fromInstruction - 1, to, copy);
+            linkTestAndBranch<IndirectBranch, copy>(record.condition(), record.bitNumber(), record.compareRegister(), reinterpret_cast<int*>(from) - 1, fromInstruction - 1, to);
             break;
         default:
…
     }

-    template<bool isCall>
-    static void linkJumpOrCall(int* from, const int* fromInstruction, void* to, CopyFunction copy = tagCFunctionPtr<CopyFunctionPtrTag>(performJITMemcpy))
-    {
+    template<BranchType type, CopyFunction copy = performJITMemcpy>
+    static void linkJumpOrCall(int* from, const int* fromInstruction, void* to)
+    {
+        static_assert(type == BranchType_JMP || type == BranchType_CALL, "");
+
         bool link;
         int imm26;
…

         ASSERT_UNUSED(isUnconditionalBranchImmediateOrNop, isUnconditionalBranchImmediateOrNop);
+        constexpr bool isCall = (type == BranchType_CALL);
         ASSERT_UNUSED(isCall, (link == isCall) || disassembleNop(from));
         ASSERT(!(reinterpret_cast<intptr_t>(from) & 3));
…
     }

-    template<bool isDirect>
-    static void linkCompareAndBranch(Condition condition, bool is64Bit, RegisterID rt, int* from, const int* fromInstruction, void* to, CopyFunction copy = tagCFunctionPtr<CopyFunctionPtrTag>(performJITMemcpy))
+    template<BranchTargetType type, CopyFunction copy = performJITMemcpy>
+    static void linkCompareAndBranch(Condition condition, bool is64Bit, RegisterID rt, int* from, const int* fromInstruction, void* to)
     {
         ASSERT(!(reinterpret_cast<intptr_t>(from) & 3));
…

         bool useDirect = isInt<19>(offset);
-        ASSERT(!isDirect || useDirect);
-
-        if (useDirect || isDirect) {
+        ASSERT(type == IndirectBranch || useDirect);
+
+        if (useDirect || type == DirectBranch) {
             int insn = compareAndBranchImmediate(is64Bit ? Datasize_64 : Datasize_32, condition == ConditionNE, static_cast<int>(offset), rt);
             RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from) == from);
             copy(from, &insn, sizeof(int));
-            if (!isDirect) {
+            if (type == IndirectBranch) {
                 insn = nopPseudo();
                 RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from + 1) == (from + 1));
…
             RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from) == from);
             copy(from, &insn, sizeof(int));
-            linkJumpOrCall<false>(from + 1, fromInstruction + 1, to, copy);
+            linkJumpOrCall<BranchType_JMP, copy>(from + 1, fromInstruction + 1, to);
         }
     }

-    template<bool isDirect>
-    static void linkConditionalBranch(Condition condition, int* from, const int* fromInstruction, void* to, CopyFunction copy = tagCFunctionPtr<CopyFunctionPtrTag>(performJITMemcpy))
+    template<BranchTargetType type, CopyFunction copy = performJITMemcpy>
+    static void linkConditionalBranch(Condition condition, int* from, const int* fromInstruction, void* to)
     {
         ASSERT(!(reinterpret_cast<intptr_t>(from) & 3));
…

         bool useDirect = isInt<19>(offset);
-        ASSERT(!isDirect || useDirect);
-
-        if (useDirect || isDirect) {
+        ASSERT(type == IndirectBranch || useDirect);
+
+        if (useDirect || type == DirectBranch) {
             int insn = conditionalBranchImmediate(static_cast<int>(offset), condition);
             RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from) == from);
             copy(from, &insn, sizeof(int));
-            if (!isDirect) {
+            if (type == IndirectBranch) {
                 insn = nopPseudo();
                 RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from + 1) == (from + 1));
…
             RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from) == from);
             copy(from, &insn, sizeof(int));
-            linkJumpOrCall<false>(from + 1, fromInstruction + 1, to, copy);
+            linkJumpOrCall<BranchType_JMP, copy>(from + 1, fromInstruction + 1, to);
         }
     }

-    template<bool isDirect>
-    static void linkTestAndBranch(Condition condition, unsigned bitNumber, RegisterID rt, int* from, const int* fromInstruction, void* to, CopyFunction copy = tagCFunctionPtr<CopyFunctionPtrTag>(performJITMemcpy))
+    template<BranchTargetType type, CopyFunction copy = performJITMemcpy>
+    static void linkTestAndBranch(Condition condition, unsigned bitNumber, RegisterID rt, int* from, const int* fromInstruction, void* to)
     {
         ASSERT(!(reinterpret_cast<intptr_t>(from) & 3));
…

         bool useDirect = isInt<14>(offset);
-        ASSERT(!isDirect || useDirect);
-
-        if (useDirect || isDirect) {
+        ASSERT(type == IndirectBranch || useDirect);
+
+        if (useDirect || type == DirectBranch) {
             int insn = testAndBranchImmediate(condition == ConditionNE, static_cast<int>(bitNumber), static_cast<int>(offset), rt);
             RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from) == from);
             copy(from, &insn, sizeof(int));
-            if (!isDirect) {
+            if (type == IndirectBranch) {
                 insn = nopPseudo();
                 RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from + 1) == (from + 1));
…
             RELEASE_ASSERT(roundUpToMultipleOf<instructionSize>(from) == from);
             copy(from, &insn, sizeof(int));
-            linkJumpOrCall<false>(from + 1, fromInstruction + 1, to, copy);
+            linkJumpOrCall<BranchType_JMP, copy>(from + 1, fromInstruction + 1, to);
         }
     }

-    template<bool isCall>
+    template<BranchType type>
     static void relinkJumpOrCall(int* from, const int* fromInstruction, void* to)
     {
-        if (!isCall && disassembleNop(from)) {
+        static_assert(type == BranchType_JMP || type == BranchType_CALL, "");
+        if ((type == BranchType_JMP) && disassembleNop(from)) {
             unsigned op01;
             int imm19;
…
             if (isConditionalBranchImmediate) {
                 ASSERT_UNUSED(op01, !op01);
-                ASSERT_UNUSED(isCall, !isCall);
+                ASSERT(type == BranchType_JMP);

                 if (imm19 == 8)
                     condition = invert(condition);

-                linkConditionalBranch<false>(condition, from - 1, fromInstruction - 1, to);
+                linkConditionalBranch<IndirectBranch>(condition, from - 1, fromInstruction - 1, to);
                 return;
             }
…
                 op = !op;

-                linkCompareAndBranch<false>(op ? ConditionNE : ConditionEQ, opSize == Datasize_64, rt, from - 1, fromInstruction - 1, to);
+                linkCompareAndBranch<IndirectBranch>(op ? ConditionNE : ConditionEQ, opSize == Datasize_64, rt, from - 1, fromInstruction - 1, to);
                 return;
             }
…
                 op = !op;

-                linkTestAndBranch<false>(op ? ConditionNE : ConditionEQ, bitNumber, rt, from - 1, fromInstruction - 1, to);
+                linkTestAndBranch<IndirectBranch>(op ? ConditionNE : ConditionEQ, bitNumber, rt, from - 1, fromInstruction - 1, to);
                 return;
             }
         }

-        linkJumpOrCall<isCall>(from, fromInstruction, to);
+        linkJumpOrCall<type>(from, fromInstruction, to);
     }
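Beyond the copy-function change, the ARM64 helpers swap bool template parameters for enums (the new BranchTargetType above, and BranchType for jump vs. call), so call sites read linkCompareAndBranch<DirectBranch>(...) instead of linkCompareAndBranch<true>(...). A minimal sketch of that readability point, using hypothetical linkBranchBool/linkBranchEnum names rather than the real JSC helpers:

    #include <cstdio>

    // A small enum documents the call site; a bare bool does not.
    enum BranchTargetType { DirectBranch, IndirectBranch };

    template<bool isDirect>
    static void linkBranchBool(int offset)
    {
        if (isDirect)
            std::printf("patch a single branch instruction at offset %d\n", offset);
        else
            std::printf("patch a branch plus landing pad at offset %d\n", offset);
    }

    template<BranchTargetType type>
    static void linkBranchEnum(int offset)
    {
        if (type == DirectBranch)
            std::printf("patch a single branch instruction at offset %d\n", offset);
        else
            std::printf("patch a branch plus landing pad at offset %d\n", offset);
    }

    int main()
    {
        linkBranchBool<true>(8);           // unclear: true what?
        linkBranchEnum<DirectBranch>(8);   // self-documenting
        linkBranchEnum<IndirectBranch>(16);
        return 0;
    }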
trunk/Source/JavaScriptCore/assembler/ARMv7Assembler.h
r247097 → r249449

 /*
- * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2009-2019 Apple Inc. All rights reserved.
  * Copyright (C) 2010 University of Szeged
  *
…
     }

-    template <typename CopyFunction>
-    static void fillNops(void* base, size_t size, CopyFunction copy)
+    using CopyFunction = void*(&)(void*, const void*, size_t);
+
+    template <CopyFunction copy>
+    static void fillNops(void* base, size_t size)
     {
         RELEASE_ASSERT(!(size % sizeof(int16_t)));
…
     }

-    typedef void* (*CopyFunction)(void*, const void*, size_t);
-
-    static void ALWAYS_INLINE link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction8, uint8_t* to, CopyFunction copy)
+    template<CopyFunction copy>
+    static void ALWAYS_INLINE link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction8, uint8_t* to)
     {
         const uint16_t* fromInstruction = reinterpret_cast_ptr<const uint16_t*>(fromInstruction8);
         switch (record.linkType()) {
         case LinkJumpT1:
-            linkJumpT1(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to, copy);
+            linkJumpT1<copy>(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to);
             break;
         case LinkJumpT2:
-            linkJumpT2(reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to, copy);
+            linkJumpT2<copy>(reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to);
             break;
         case LinkJumpT3:
-            linkJumpT3(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to, copy);
+            linkJumpT3<copy>(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to);
             break;
         case LinkJumpT4:
-            linkJumpT4(reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to, copy);
+            linkJumpT4<copy>(reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to);
             break;
         case LinkConditionalJumpT4:
-            linkConditionalJumpT4(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to, copy);
+            linkConditionalJumpT4<copy>(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to);
             break;
         case LinkConditionalBX:
-            linkConditionalBX(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to, copy);
+            linkConditionalBX<copy>(record.condition(), reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to);
             break;
         case LinkBX:
-            linkBX(reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to, copy);
+            linkBX<copy>(reinterpret_cast_ptr<uint16_t*>(from), fromInstruction, to);
             break;
         default:
…
         return ((relative << 7) >> 7) == relative;
     }
-
-    static void linkJumpT1(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target, CopyFunction copy = performJITMemcpy)
+
+    template<CopyFunction copy = performJITMemcpy>
+    static void linkJumpT1(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target)
     {
         // FIMXE: this should be up in the MacroAssembler layer. :-(
…
         copy(writeTarget - 1, &newInstruction, sizeof(uint16_t));
     }
-
-    static void linkJumpT2(uint16_t* writeTarget, const uint16_t* instruction, void* target, CopyFunction copy = performJITMemcpy)
+
+    template<CopyFunction copy = performJITMemcpy>
+    static void linkJumpT2(uint16_t* writeTarget, const uint16_t* instruction, void* target)
     {
         // FIMXE: this should be up in the MacroAssembler layer. :-(
…
     }

-    static void linkJumpT3(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target, CopyFunction copy = performJITMemcpy)
+    template<CopyFunction copy = performJITMemcpy>
+    static void linkJumpT3(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target)
     {
         // FIMXE: this should be up in the MacroAssembler layer. :-(
…
     }

-    static void linkJumpT4(uint16_t* writeTarget, const uint16_t* instruction, void* target, CopyFunction copy = performJITMemcpy)
+    template<CopyFunction copy = performJITMemcpy>
+    static void linkJumpT4(uint16_t* writeTarget, const uint16_t* instruction, void* target)
     {
         // FIMXE: this should be up in the MacroAssembler layer. :-(
…
         copy(writeTarget - 2, instructions, 2 * sizeof(uint16_t));
     }
-
-    static void linkConditionalJumpT4(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target, CopyFunction copy = performJITMemcpy)
+
+    template<CopyFunction copy = performJITMemcpy>
+    static void linkConditionalJumpT4(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target)
     {
         // FIMXE: this should be up in the MacroAssembler layer. :-(
…
         uint16_t newInstruction = ifThenElse(cond) | OP_IT;
         copy(writeTarget - 3, &newInstruction, sizeof(uint16_t));
-        linkJumpT4(writeTarget, instruction, target, copy);
-    }
-
-    static void linkBX(uint16_t* writeTarget, const uint16_t* instruction, void* target, CopyFunction copy = performJITMemcpy)
+        linkJumpT4<copy>(writeTarget, instruction, target);
+    }
+
+    template<CopyFunction copy = performJITMemcpy>
+    static void linkBX(uint16_t* writeTarget, const uint16_t* instruction, void* target)
     {
         // FIMXE: this should be up in the MacroAssembler layer. :-(
…
         copy(writeTarget - 5, instructions, 5 * sizeof(uint16_t));
     }
-
-    static void linkConditionalBX(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target, CopyFunction copy = performJITMemcpy)
+
+    template<CopyFunction copy = performJITMemcpy>
+    static void linkConditionalBX(Condition cond, uint16_t* writeTarget, const uint16_t* instruction, void* target)
     {
         // FIMXE: this should be up in the MacroAssembler layer. :-(
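Note how each ARMv7 helper keeps its existing call sites working by defaulting the new template parameter to performJITMemcpy; only callers that need a different copier (such as the link buffer) say so explicitly. A minimal sketch of a defaulted function-reference template parameter, with hypothetical jitCopy/plainCopy/patchJump names standing in for the real functions:

    #include <cstdint>
    #include <cstring>
    #include <cstddef>

    // Hypothetical stand-ins: jitCopy mimics performJITMemcpy, plainCopy the
    // raw memcpy used while code still lives in a local buffer.
    void* jitCopy(void* dst, const void* src, size_t n) { return std::memcpy(dst, src, n); }
    void* plainCopy(void* dst, const void* src, size_t n) { return std::memcpy(dst, src, n); }

    using CopyFunction = void* (&)(void*, const void*, size_t);

    // The default keeps ordinary call sites unchanged: patchJump(...) writes
    // through jitCopy, while patchJump<plainCopy>(...) opts in to the other copier.
    template<CopyFunction copy = jitCopy>
    static void patchJump(uint16_t* writeTarget, uint16_t newInstruction)
    {
        copy(writeTarget, &newInstruction, sizeof(uint16_t));
    }

    int main()
    {
        uint16_t slot = 0;
        patchJump(&slot, 0xbf00);            // Thumb NOP encoding; uses the default copier
        patchJump<plainCopy>(&slot, 0xbf00); // explicit copier, as a link buffer would do
        return slot == 0xbf00 ? 0 : 1;
    }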
trunk/Source/JavaScriptCore/assembler/AbstractMacroAssembler.h
r248546 → r249449

     }

+#if COMPILER(GCC)
+    // Workaround for GCC demanding that memcpy "must be the name of a function with external linkage".
+    static void* memcpy(void* dst, const void* src, size_t size)
+    {
+        return std::memcpy(dst, src, size);
+    }
+#endif
+
     void emitNops(size_t memoryToFillWithNopsInBytes)
     {
…
         size_t targetCodeSize = startCodeSize + memoryToFillWithNopsInBytes;
         buffer.ensureSpace(memoryToFillWithNopsInBytes);
-        AssemblerType::fillNops(static_cast<char*>(buffer.data()) + startCodeSize, memoryToFillWithNopsInBytes, memcpy);
+        AssemblerType::template fillNops<memcpy>(static_cast<char*>(buffer.data()) + startCodeSize, memoryToFillWithNopsInBytes);
         buffer.setCodeSize(targetCodeSize);
 #endif
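The GCC-only wrapper exists because emitNops now passes memcpy itself as the template argument to fillNops while the code is still in a local buffer. A standalone sketch of the same workaround follows; AbstractAssembler, fillWithZeroes and zeroScratch are hypothetical names, and the only claim about GCC is the diagnostic quoted in the comment above, not a full explanation of why the library memcpy is rejected:

    #include <cstring>
    #include <cstddef>

    using CopyFunction = void* (&)(void*, const void*, size_t);

    // A toy consumer of a copier-as-template-parameter, analogous to fillNops.
    template<CopyFunction copy>
    static void fillWithZeroes(void* base, size_t size)
    {
        const char zero = 0;
        for (size_t i = 0; i < size; ++i)
            copy(static_cast<char*>(base) + i, &zero, 1);
    }

    struct AbstractAssembler {
        // Thin wrapper: a locally defined, plainly named function that can be
        // used as the template argument even where the library memcpy cannot.
        static void* memcpy(void* dst, const void* src, size_t size)
        {
            return std::memcpy(dst, src, size);
        }

        static void zeroScratch(void* base, size_t size)
        {
            fillWithZeroes<memcpy>(base, size); // binds to the wrapper above
        }
    };

    int main()
    {
        char buffer[8];
        AbstractAssembler::zeroScratch(buffer, sizeof(buffer));
        return buffer[0]; // 0 after zeroing
    }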
trunk/Source/JavaScriptCore/assembler/LinkBuffer.cpp
r247799 → r249449

         recordLinkOffsets(m_assemblerStorage, readPtr, initialSize, readPtr - writePtr);

-#if CPU(ARM64E) && ENABLE(FAST_JIT_PERMISSIONS)
-        auto memcpyFunction = tagCFunctionPtr<CopyFunctionPtrTag>(memcpy);
-#else
-        auto memcpyFunction = tagCFunctionPtr<CopyFunctionPtrTag>(performJITMemcpy);
-#endif
         for (unsigned i = 0; i < jumpCount; ++i) {
             uint8_t* location = codeOutData + jumpsToLink[i].from();
             uint8_t* target = codeOutData + jumpsToLink[i].to() - executableOffsetFor(jumpsToLink[i].to());
-            MacroAssembler::link(jumpsToLink[i], outData + jumpsToLink[i].from(), location, target, memcpyFunction);
+#if CPU(ARM64E) && ENABLE(FAST_JIT_PERMISSIONS)
+            MacroAssembler::link<memcpy>(jumpsToLink[i], outData + jumpsToLink[i].from(), location, target);
+#else
+            MacroAssembler::link<performJITMemcpy>(jumpsToLink[i], outData + jumpsToLink[i].from(), location, target);
+#endif
         }
…
         if (!m_executableMemory) {
             size_t nopSizeInBytes = initialSize - compactSize;
-            MacroAssembler::AssemblerType_T::fillNops(outData + compactSize, nopSizeInBytes, memcpy);
+#if CPU(ARM64E) && ENABLE(FAST_JIT_PERMISSIONS)
+            Assembler::fillNops<memcpy>(outData + compactSize, nopSizeInBytes);
+#else
+            Assembler::fillNops<performJITMemcpy>(outData + compactSize, nopSizeInBytes);
+#endif
         }
trunk/Source/JavaScriptCore/assembler/MIPSAssembler.h
r247097 → r249449

     }

-    template <typename CopyFunction>
-    static void fillNops(void* base, size_t size, CopyFunction copy)
+    using CopyFunction = void*(&)(void*, const void*, size_t);
+
+    template <CopyFunction copy>
+    static void fillNops(void* base, size_t size)
     {
         UNUSED_PARAM(copy);
trunk/Source/JavaScriptCore/assembler/MacroAssemblerARM64.h
r248878 → r249449

     static JumpLinkType computeJumpType(LinkRecord& record, const uint8_t* from, const uint8_t* to) { return Assembler::computeJumpType(record, from, to); }
     static int jumpSizeDelta(JumpType jumpType, JumpLinkType jumpLinkType) { return Assembler::jumpSizeDelta(jumpType, jumpLinkType); }
-    template <typename CopyFunction>
-    static void link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction, uint8_t* to, CopyFunction copy) { return Assembler::link(record, from, fromInstruction, to, copy); }
+    template <Assembler::CopyFunction copy>
+    static void link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction, uint8_t* to) { return Assembler::link<copy>(record, from, fromInstruction, to); }

     static const Scale ScalePtr = TimesEight;
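The MacroAssembler layer here (and its ARMv7 counterpart below) only forwards the copier, now as a template argument rather than a runtime value, so no function pointer ever exists between the layers. A small sketch of that forwarding pattern, with hypothetical Assembler/MacroAssembler/plainCopy stand-ins:

    #include <cstdint>
    #include <cstring>
    #include <cstddef>

    // Hypothetical copier standing in for performJITMemcpy.
    void* plainCopy(void* dst, const void* src, size_t n) { return std::memcpy(dst, src, n); }

    using CopyFunction = void* (&)(void*, const void*, size_t);

    struct Assembler {                      // lower layer, akin to ARM64Assembler
        template<CopyFunction copy>
        static void link(uint32_t* where, uint32_t insn) { copy(where, &insn, sizeof(insn)); }
    };

    struct MacroAssembler {                 // upper layer, akin to MacroAssemblerARM64
        template<CopyFunction copy>         // forwarded unchanged to the layer below
        static void link(uint32_t* where, uint32_t insn) { Assembler::link<copy>(where, insn); }
    };

    int main()
    {
        uint32_t slot = 0;
        MacroAssembler::link<plainCopy>(&slot, 0xd503201f); // AArch64 NOP encoding
        return slot == 0xd503201f ? 0 : 1;
    }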
trunk/Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h
r248686 → r249449

     static JumpLinkType computeJumpType(LinkRecord& record, const uint8_t* from, const uint8_t* to) { return ARMv7Assembler::computeJumpType(record, from, to); }
     static int jumpSizeDelta(JumpType jumpType, JumpLinkType jumpLinkType) { return ARMv7Assembler::jumpSizeDelta(jumpType, jumpLinkType); }
-    template <typename CopyFunction>
-    static void link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction, uint8_t* to, CopyFunction copy) { return ARMv7Assembler::link(record, from, fromInstruction, to, copy); }
+    template <Assembler::CopyFunction copy>
+    static void link(LinkRecord& record, uint8_t* from, const uint8_t* fromInstruction, uint8_t* to) { return ARMv7Assembler::link<copy>(record, from, fromInstruction, to); }

     struct ArmAddress {
trunk/Source/JavaScriptCore/assembler/X86Assembler.h
r247889 → r249449

     }

-    template <typename CopyFunction>
-    static void fillNops(void* base, size_t size, CopyFunction copy)
+    using CopyFunction = void*(&)(void*, const void*, size_t);
+
+    template <CopyFunction copy>
+    static void fillNops(void* base, size_t size)
     {
         UNUSED_PARAM(copy);
trunk/Source/JavaScriptCore/jit/ExecutableAllocator.h
r244470 → r249449

 #endif // ENABLE(SEPARATED_WX_HEAP)

-static inline void* performJITMemcpy(void *dst, const void *src, size_t n)
+static ALWAYS_INLINE void* performJITMemcpy(void *dst, const void *src, size_t n)
 {
 #if CPU(ARM64)
trunk/Source/JavaScriptCore/runtime/JSCPtrTag.h
r248192 → r249449

     v(B3CompilationPtrTag) \
     v(BytecodePtrTag) \
-    v(CopyFunctionPtrTag) \
     v(DOMJITFunctionPtrTag) \
     v(DisassemblyPtrTag) \