Changeset 176095 in webkit
- Timestamp:
- Nov 13, 2014 3:00:07 PM (9 years ago)
- Location:
- trunk/Source/JavaScriptCore
- Files:
-
- 6 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/ChangeLog
r176087 → r176095

2014-11-13  Mark Lam  <mark.lam@apple.com>

        Change X86/64 JIT probes to save/restore xmm regs as double instead of __m128.
        <https://webkit.org/b/138708>

        Reviewed by Michael Saboff.

        The JIT code only uses the xmm regs as double registers. This patch changes
        the storage types of the FP registers in X86Assembler.h to double instead of
        __m128, and updates the X86 and X86_64 JIT probe implementations accordingly.

        Also made some minor cosmetic changes in the output of the probe dump functions.

        * assembler/MacroAssemblerX86Common.cpp:
        (JSC::MacroAssemblerX86Common::ProbeContext::dumpCPURegisters):
        * assembler/X86Assembler.h:
        * jit/JITStubsX86.h:
        * jit/JITStubsX86Common.h:
        * jit/JITStubsX86_64.h:

2014-11-13  Juergen Ributzka  <juergen@apple.com>
trunk/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.cpp
r176031 r176095 1 1 /* 2 * Copyright (C) 2013 Apple Inc. All rights reserved.2 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved. 3 3 * 4 4 * Redistribution and use in source and binary forms, with or without … … 38 38 #define DUMP_GPREGISTER(_type, _regName) { \ 39 39 int32_t value = reinterpret_cast<int32_t>(cpu._regName); \ 40 dataLogF("%s %6s: 0x%08x 40 dataLogF("%s %6s: 0x%08x %d\n", indentation, #_regName, value, value) ; \ 41 41 } 42 42 #elif CPU(X86_64) 43 43 #define DUMP_GPREGISTER(_type, _regName) { \ 44 44 int64_t value = reinterpret_cast<int64_t>(cpu._regName); \ 45 dataLogF("%s %6s: 0x%016llx 45 dataLogF("%s %6s: 0x%016llx %lld\n", indentation, #_regName, value, value) ; \ 46 46 } 47 47 #endif … … 51 51 52 52 #define DUMP_FPREGISTER(_type, _regName) { \ 53 uint 32_t* u = reinterpret_cast<uint32_t*>(&cpu._regName); \53 uint64_t* u = reinterpret_cast<uint64_t*>(&cpu._regName); \ 54 54 double* d = reinterpret_cast<double*>(&cpu._regName); \ 55 dataLogF("%s %6s: 0x%08x%08x 0x%08x%08x %12g %12g\n", \ 56 indentation, #_regName, u[3], u[2], u[1], u[0], d[1], d[0]); \ 55 dataLogF("%s %6s: 0x%016llx %.13g\n", indentation, #_regName, *u, *d); \ 57 56 } 58 57 FOR_EACH_CPU_FPREGISTER(DUMP_FPREGISTER) -
trunk/Source/JavaScriptCore/assembler/X86Assembler.h
r176072 r176095 1 1 /* 2 * Copyright (C) 2008, 2012, 2013 Apple Inc. All rights reserved.2 * Copyright (C) 2008, 2012, 2013, 2014 Apple Inc. All rights reserved. 3 3 * 4 4 * Redistribution and use in source and binary forms, with or without … … 74 74 // probe code is updated later to reflect the JITs' usage of these registers. 75 75 #define FOR_EACH_CPU_FPREGISTER(V) \ 76 V( __m128, xmm0) \77 V( __m128, xmm1) \78 V( __m128, xmm2) \79 V( __m128, xmm3) \80 V( __m128, xmm4) \81 V( __m128, xmm5) \82 V( __m128, xmm6) \83 V( __m128, xmm7) \76 V(double, xmm0) \ 77 V(double, xmm1) \ 78 V(double, xmm2) \ 79 V(double, xmm3) \ 80 V(double, xmm4) \ 81 V(double, xmm5) \ 82 V(double, xmm6) \ 83 V(double, xmm7) \ 84 84 FOR_EACH_X86_64_CPU_FPREGISTER(V) 85 85 … … 102 102 103 103 #define FOR_EACH_X86_64_CPU_FPREGISTER(V) \ 104 V( __m128, xmm8) \105 V( __m128, xmm9) \106 V( __m128, xmm10) \107 V( __m128, xmm11) \108 V( __m128, xmm12) \109 V( __m128, xmm13) \110 V( __m128, xmm14) \111 V( __m128, xmm15)104 V(double, xmm8) \ 105 V(double, xmm9) \ 106 V(double, xmm10) \ 107 V(double, xmm11) \ 108 V(double, xmm12) \ 109 V(double, xmm13) \ 110 V(double, xmm14) \ 111 V(double, xmm15) 112 112 113 113 #endif // CPU(X86_64) -
trunk/Source/JavaScriptCore/jit/JITStubsX86.h
r176031 r176095 1 1 /* 2 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.2 * Copyright (C) 2008, 2009, 2013, 2014 Apple Inc. All rights reserved. 3 3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca> 4 4 * Copyright (C) Research In Motion Limited 2010, 2011. All rights reserved. … … 96 96 "movl %ecx, " STRINGIZE_VALUE_OF(PROBE_CPU_ESP_OFFSET) "(%ebp)" "\n" 97 97 98 "mov dqa%xmm0, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%ebp)" "\n"99 "mov dqa%xmm1, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%ebp)" "\n"100 "mov dqa%xmm2, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%ebp)" "\n"101 "mov dqa%xmm3, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%ebp)" "\n"102 "mov dqa%xmm4, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%ebp)" "\n"103 "mov dqa%xmm5, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%ebp)" "\n"104 "mov dqa%xmm6, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%ebp)" "\n"105 "mov dqa%xmm7, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%ebp)" "\n"98 "movq %xmm0, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%ebp)" "\n" 99 "movq %xmm1, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%ebp)" "\n" 100 "movq %xmm2, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%ebp)" "\n" 101 "movq %xmm3, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%ebp)" "\n" 102 "movq %xmm4, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%ebp)" "\n" 103 "movq %xmm5, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%ebp)" "\n" 104 "movq %xmm6, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%ebp)" "\n" 105 "movq %xmm7, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%ebp)" "\n" 106 106 107 107 // Reserve stack space for the arg while maintaining the required stack … … 120 120 "movl " STRINGIZE_VALUE_OF(PROBE_CPU_EDI_OFFSET) "(%ebp), %edi" "\n" 121 121 122 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%ebp), %xmm0" "\n"123 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%ebp), %xmm1" "\n"124 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%ebp), %xmm2" 
"\n"125 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%ebp), %xmm3" "\n"126 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%ebp), %xmm4" "\n"127 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%ebp), %xmm5" "\n"128 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%ebp), %xmm6" "\n"129 "mov dqa" STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%ebp), %xmm7" "\n"122 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%ebp), %xmm0" "\n" 123 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%ebp), %xmm1" "\n" 124 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%ebp), %xmm2" "\n" 125 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%ebp), %xmm3" "\n" 126 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%ebp), %xmm4" "\n" 127 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%ebp), %xmm5" "\n" 128 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%ebp), %xmm6" "\n" 129 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%ebp), %xmm7" "\n" 130 130 131 131 // There are 6 more registers left to restore: -
trunk/Source/JavaScriptCore/jit/JITStubsX86Common.h
r176031 r176095 1 1 /* 2 * Copyright (C) 2013 Apple Inc. All rights reserved.2 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved. 3 3 * 4 4 * Redistribution and use in source and binary forms, with or without … … 51 51 #define PROBE_ARG2_OFFSET (2 * PTR_SIZE) 52 52 53 #define PROBE_CPU_EAX_OFFSET (4 * PTR_SIZE) 54 #define PROBE_CPU_EBX_OFFSET (5 * PTR_SIZE) 55 #define PROBE_CPU_ECX_OFFSET (6 * PTR_SIZE) 56 #define PROBE_CPU_EDX_OFFSET (7 * PTR_SIZE) 57 #define PROBE_CPU_ESI_OFFSET (8 * PTR_SIZE) 58 #define PROBE_CPU_EDI_OFFSET (9 * PTR_SIZE) 59 #define PROBE_CPU_EBP_OFFSET (10 * PTR_SIZE) 60 #define PROBE_CPU_ESP_OFFSET (11 * PTR_SIZE) 53 #define PROBE_FIRST_GPR_OFFSET (3 * PTR_SIZE) 54 #define PROBE_CPU_EAX_OFFSET (PROBE_FIRST_GPR_OFFSET + (0 * PTR_SIZE)) 55 #define PROBE_CPU_ECX_OFFSET (PROBE_FIRST_GPR_OFFSET + (1 * PTR_SIZE)) 56 #define PROBE_CPU_EDX_OFFSET (PROBE_FIRST_GPR_OFFSET + (2 * PTR_SIZE)) 57 #define PROBE_CPU_EBX_OFFSET (PROBE_FIRST_GPR_OFFSET + (3 * PTR_SIZE)) 58 #define PROBE_CPU_ESP_OFFSET (PROBE_FIRST_GPR_OFFSET + (4 * PTR_SIZE)) 59 #define PROBE_CPU_EBP_OFFSET (PROBE_FIRST_GPR_OFFSET + (5 * PTR_SIZE)) 60 #define PROBE_CPU_ESI_OFFSET (PROBE_FIRST_GPR_OFFSET + (6 * PTR_SIZE)) 61 #define PROBE_CPU_EDI_OFFSET (PROBE_FIRST_GPR_OFFSET + (7 * PTR_SIZE)) 61 62 62 63 #if CPU(X86) 63 #define PROBE_FIRST_SPECIAL_OFFSET ( 12 * PTR_SIZE)64 #define PROBE_FIRST_SPECIAL_OFFSET (PROBE_FIRST_GPR_OFFSET + (8 * PTR_SIZE)) 64 65 #else // CPU(X86_64) 65 #define PROBE_CPU_R8_OFFSET ( 12 * PTR_SIZE)66 #define PROBE_CPU_R9_OFFSET ( 13 * PTR_SIZE)67 #define PROBE_CPU_R10_OFFSET ( 14 * PTR_SIZE)68 #define PROBE_CPU_R11_OFFSET ( 15 * PTR_SIZE)69 #define PROBE_CPU_R12_OFFSET ( 16 * PTR_SIZE)70 #define PROBE_CPU_R13_OFFSET ( 17 * PTR_SIZE)71 #define PROBE_CPU_R14_OFFSET ( 18 * PTR_SIZE)72 #define PROBE_CPU_R15_OFFSET ( 19 * PTR_SIZE)73 #define PROBE_FIRST_SPECIAL_OFFSET ( 20 * PTR_SIZE)66 #define PROBE_CPU_R8_OFFSET (PROBE_FIRST_GPR_OFFSET + (8 * PTR_SIZE)) 67 #define 
PROBE_CPU_R9_OFFSET (PROBE_FIRST_GPR_OFFSET + (9 * PTR_SIZE)) 68 #define PROBE_CPU_R10_OFFSET (PROBE_FIRST_GPR_OFFSET + (10 * PTR_SIZE)) 69 #define PROBE_CPU_R11_OFFSET (PROBE_FIRST_GPR_OFFSET + (11 * PTR_SIZE)) 70 #define PROBE_CPU_R12_OFFSET (PROBE_FIRST_GPR_OFFSET + (12 * PTR_SIZE)) 71 #define PROBE_CPU_R13_OFFSET (PROBE_FIRST_GPR_OFFSET + (13 * PTR_SIZE)) 72 #define PROBE_CPU_R14_OFFSET (PROBE_FIRST_GPR_OFFSET + (14 * PTR_SIZE)) 73 #define PROBE_CPU_R15_OFFSET (PROBE_FIRST_GPR_OFFSET + (15 * PTR_SIZE)) 74 #define PROBE_FIRST_SPECIAL_OFFSET (PROBE_FIRST_GPR_OFFSET + (16 * PTR_SIZE)) 74 75 #endif // CPU(X86_64) 75 76 76 77 #define PROBE_CPU_EIP_OFFSET (PROBE_FIRST_SPECIAL_OFFSET + (0 * PTR_SIZE)) 77 78 #define PROBE_CPU_EFLAGS_OFFSET (PROBE_FIRST_SPECIAL_OFFSET + (1 * PTR_SIZE)) 79 #define PROBE_FIRST_XMM_OFFSET (PROBE_FIRST_SPECIAL_OFFSET + (2 * PTR_SIZE)) 78 80 79 #if CPU(X86) 80 #define PROBE_FIRST_XMM_OFFSET (PROBE_FIRST_SPECIAL_OFFSET + (4 * PTR_SIZE)) // After padding. 81 #else // CPU(X86_64) 82 #define PROBE_FIRST_XMM_OFFSET (PROBE_FIRST_SPECIAL_OFFSET + (2 * PTR_SIZE)) // After padding. 
83 #endif // CPU(X86_64) 84 85 #define XMM_SIZE 16 81 #define XMM_SIZE 8 86 82 #define PROBE_CPU_XMM0_OFFSET (PROBE_FIRST_XMM_OFFSET + (0 * XMM_SIZE)) 87 83 #define PROBE_CPU_XMM1_OFFSET (PROBE_FIRST_XMM_OFFSET + (1 * XMM_SIZE)) … … 93 89 #define PROBE_CPU_XMM7_OFFSET (PROBE_FIRST_XMM_OFFSET + (7 * XMM_SIZE)) 94 90 91 #if CPU(X86) 95 92 #define PROBE_SIZE (PROBE_CPU_XMM7_OFFSET + XMM_SIZE) 93 #else // CPU(X86_64) 94 #define PROBE_CPU_XMM8_OFFSET (PROBE_FIRST_XMM_OFFSET + (8 * XMM_SIZE)) 95 #define PROBE_CPU_XMM9_OFFSET (PROBE_FIRST_XMM_OFFSET + (9 * XMM_SIZE)) 96 #define PROBE_CPU_XMM10_OFFSET (PROBE_FIRST_XMM_OFFSET + (10 * XMM_SIZE)) 97 #define PROBE_CPU_XMM11_OFFSET (PROBE_FIRST_XMM_OFFSET + (11 * XMM_SIZE)) 98 #define PROBE_CPU_XMM12_OFFSET (PROBE_FIRST_XMM_OFFSET + (12 * XMM_SIZE)) 99 #define PROBE_CPU_XMM13_OFFSET (PROBE_FIRST_XMM_OFFSET + (13 * XMM_SIZE)) 100 #define PROBE_CPU_XMM14_OFFSET (PROBE_FIRST_XMM_OFFSET + (14 * XMM_SIZE)) 101 #define PROBE_CPU_XMM15_OFFSET (PROBE_FIRST_XMM_OFFSET + (15 * XMM_SIZE)) 102 #define PROBE_SIZE (PROBE_CPU_XMM15_OFFSET + XMM_SIZE) 103 #endif // CPU(X86_64) 96 104 97 105 // These ASSERTs remind you that if you change the layout of ProbeContext, … … 133 141 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm7) == PROBE_CPU_XMM7_OFFSET, ProbeContext_cpu_xmm7_offset_matches_ctiMasmProbeTrampoline); 134 142 143 #if CPU(X86_64) 144 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm8) == PROBE_CPU_XMM8_OFFSET, ProbeContext_cpu_xmm8_offset_matches_ctiMasmProbeTrampoline); 145 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm9) == PROBE_CPU_XMM9_OFFSET, ProbeContext_cpu_xmm9_offset_matches_ctiMasmProbeTrampoline); 146 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm10) == PROBE_CPU_XMM10_OFFSET, ProbeContext_cpu_xmm10_offset_matches_ctiMasmProbeTrampoline); 147 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm11) == PROBE_CPU_XMM11_OFFSET, ProbeContext_cpu_xmm11_offset_matches_ctiMasmProbeTrampoline); 148 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm12) == PROBE_CPU_XMM12_OFFSET, 
ProbeContext_cpu_xmm12_offset_matches_ctiMasmProbeTrampoline); 149 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm13) == PROBE_CPU_XMM13_OFFSET, ProbeContext_cpu_xmm13_offset_matches_ctiMasmProbeTrampoline); 150 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm14) == PROBE_CPU_XMM14_OFFSET, ProbeContext_cpu_xmm14_offset_matches_ctiMasmProbeTrampoline); 151 COMPILE_ASSERT(PROBE_OFFSETOF(cpu.xmm15) == PROBE_CPU_XMM15_OFFSET, ProbeContext_cpu_xmm15_offset_matches_ctiMasmProbeTrampoline); 152 #endif // CPU(X86_64) 153 135 154 COMPILE_ASSERT(sizeof(MacroAssembler::ProbeContext) == PROBE_SIZE, ProbeContext_size_matches_ctiMasmProbeTrampoline); 136 155 137 // Also double check that the xmm registers are 16 byte (128-bit) aligned as138 // required by the movdqa instruction used in the trampoline.139 COMPILE_ASSERT(!(PROBE_OFFSETOF(cpu.xmm0) % 16), ProbeContext_xmm0_offset_not_aligned_properly);140 156 #undef PROBE_OFFSETOF 141 157 -
trunk/Source/JavaScriptCore/jit/JITStubsX86_64.h
r176031 r176095 1 1 /* 2 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.2 * Copyright (C) 2008, 2009, 2013, 2014 Apple Inc. All rights reserved. 3 3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca> 4 4 * Copyright (C) Research In Motion Limited 2010, 2011. All rights reserved. … … 105 105 "movq %r15, " STRINGIZE_VALUE_OF(PROBE_CPU_R15_OFFSET) "(%rbp)" "\n" 106 106 107 "movdqa %xmm0, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%rbp)" "\n" 108 "movdqa %xmm1, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%rbp)" "\n" 109 "movdqa %xmm2, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%rbp)" "\n" 110 "movdqa %xmm3, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%rbp)" "\n" 111 "movdqa %xmm4, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%rbp)" "\n" 112 "movdqa %xmm5, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%rbp)" "\n" 113 "movdqa %xmm6, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%rbp)" "\n" 114 "movdqa %xmm7, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%rbp)" "\n" 107 "movq %xmm0, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%rbp)" "\n" 108 "movq %xmm1, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%rbp)" "\n" 109 "movq %xmm2, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%rbp)" "\n" 110 "movq %xmm3, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%rbp)" "\n" 111 "movq %xmm4, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%rbp)" "\n" 112 "movq %xmm5, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%rbp)" "\n" 113 "movq %xmm6, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%rbp)" "\n" 114 "movq %xmm7, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%rbp)" "\n" 115 "movq %xmm8, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM8_OFFSET) "(%rbp)" "\n" 116 "movq %xmm9, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM9_OFFSET) "(%rbp)" "\n" 117 "movq %xmm10, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM10_OFFSET) "(%rbp)" "\n" 118 "movq %xmm11, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM11_OFFSET) "(%rbp)" "\n" 119 "movq %xmm12, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM12_OFFSET) "(%rbp)" "\n" 120 
"movq %xmm13, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM13_OFFSET) "(%rbp)" "\n" 121 "movq %xmm14, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM14_OFFSET) "(%rbp)" "\n" 122 "movq %xmm15, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM15_OFFSET) "(%rbp)" "\n" 115 123 116 124 "movq %rbp, %rdi" "\n" // the ProbeContext* arg. … … 134 142 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R15_OFFSET) "(%rbp), %r15" "\n" 135 143 136 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%rbp), %xmm0" "\n" 137 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%rbp), %xmm1" "\n" 138 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%rbp), %xmm2" "\n" 139 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%rbp), %xmm3" "\n" 140 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%rbp), %xmm4" "\n" 141 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%rbp), %xmm5" "\n" 142 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%rbp), %xmm6" "\n" 143 "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%rbp), %xmm7" "\n" 144 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%rbp), %xmm0" "\n" 145 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%rbp), %xmm1" "\n" 146 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%rbp), %xmm2" "\n" 147 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%rbp), %xmm3" "\n" 148 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%rbp), %xmm4" "\n" 149 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%rbp), %xmm5" "\n" 150 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%rbp), %xmm6" "\n" 151 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%rbp), %xmm7" "\n" 152 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM8_OFFSET) "(%rbp), %xmm8" "\n" 153 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM9_OFFSET) "(%rbp), %xmm9" "\n" 154 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM10_OFFSET) "(%rbp), %xmm10" "\n" 155 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM11_OFFSET) "(%rbp), %xmm11" "\n" 156 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM12_OFFSET) "(%rbp), %xmm12" "\n" 157 "movq " 
STRINGIZE_VALUE_OF(PROBE_CPU_XMM13_OFFSET) "(%rbp), %xmm13" "\n" 158 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM14_OFFSET) "(%rbp), %xmm14" "\n" 159 "movq " STRINGIZE_VALUE_OF(PROBE_CPU_XMM15_OFFSET) "(%rbp), %xmm15" "\n" 144 160 145 161 // There are 6 more registers left to restore:
Note: See TracChangeset
for help on using the changeset viewer.