Changeset 120356 in WebKit
- Timestamp:
- Jun 14, 2012 1:18:23 PM (12 years ago)
- Location:
- trunk/Source/WTF
- Files:
-
- 4 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WTF/ChangeLog
r120256 r120356 1 2012-06-14 Geoffrey Garen <ggaren@apple.com> 2 3 ARMv7 should support spinlocks 4 https://bugs.webkit.org/show_bug.cgi?id=88957 5 6 Reviewed by Darin Adler. 7 8 More info @ http://infocenter.arm.com/help/topic/ 9 com.arm.doc.genc007826/Barrier_Litmus_Tests_and_Cookbook_A08.pdf 10 11 * wtf/Atomics.h: 12 (WTF::memoryBarrierAfterLock): 13 (WTF::memoryBarrierBeforeUnlock): Added memory barrier primitives since 14 ARMv7 has a weakly ordered memory model. 15 16 * wtf/Platform.h: Enabled compare-and-swap on Windows so our spinlock 17 implementation would continue working on Windows. 18 19 * wtf/TCSpinLock.h: 20 (TCMalloc_SpinLock::Lock): 21 (TCMalloc_SpinLock::Unlock): 22 (TCMalloc_SpinLock): Use our compare-and-swap helper function to avoid 23 rewriting it in assembly here. 24 25 Added memory barriers since ARMv7 needs them. 26 27 Removed PPC support because our helper function doesn't support PPC. 28 1 29 2012-06-13 Arnaud Renevier <arno@renevier.net> 2 30 -
trunk/Source/WTF/wtf/Atomics.h
r118922 r120356 207 207 } 208 208 209 #if CPU(ARM_THUMB2) 210 211 inline void memoryBarrierAfterLock() 212 { 213 asm volatile("dmb" ::: "memory"); 214 } 215 216 inline void memoryBarrierBeforeUnlock() 217 { 218 asm volatile("dmb" ::: "memory"); 219 } 220 221 #else 222 223 inline void memoryBarrierAfterLock() { } 224 inline void memoryBarrierBeforeUnlock() { } 225 226 #endif 227 209 228 } // namespace WTF 210 229 -
trunk/Source/WTF/wtf/Platform.h
r119633 r120356 1069 1069 #endif 1070 1070 1071 #if !defined(ENABLE_COMPARE_AND_SWAP) && COMPILER(GCC) && (CPU(X86) || CPU(X86_64) || CPU(ARM_THUMB2))1071 #if !defined(ENABLE_COMPARE_AND_SWAP) && (OS(WINDOWS) || (COMPILER(GCC) && (CPU(X86) || CPU(X86_64) || CPU(ARM_THUMB2)))) 1072 1072 #define ENABLE_COMPARE_AND_SWAP 1 1073 1073 #endif -
trunk/Source/WTF/wtf/TCSpinLock.h
r117478 r120356 35 35 #define TCMALLOC_INTERNAL_SPINLOCK_H__ 36 36 37 #if (CPU(X86) || CPU(X86_64) || CPU(PPC)) && (COMPILER(GCC) || COMPILER(MSVC)) 38 39 #include <stdint.h> 40 #include <time.h> /* For nanosleep() */ 41 42 #if OS(WINDOWS) 43 #ifndef WIN32_LEAN_AND_MEAN 44 #define WIN32_LEAN_AND_MEAN 45 #endif 46 #include <windows.h> 47 #else 48 #include <sched.h> /* For sched_yield() */ 37 #include <wtf/Atomics.h> 38 #if OS(UNIX) 39 #include <sched.h> 49 40 #endif 50 41 51 static void TCMalloc_SlowLock(volatile unsigned int* lockword); 42 #if ENABLE(COMPARE_AND_SWAP) 43 44 static void TCMalloc_SlowLock(unsigned* lockword); 52 45 53 46 // The following is a struct so that it can be initialized at compile time 54 47 struct TCMalloc_SpinLock { 48 void Lock() { 49 if (!WTF::weakCompareAndSwap(&lockword_, 0, 1)) 50 TCMalloc_SlowLock(&lockword_); 51 WTF::memoryBarrierAfterLock(); 52 } 55 53 56 inline void Lock() { 57 int r; 58 #if COMPILER(GCC) 59 #if CPU(X86) || CPU(X86_64) 60 __asm__ __volatile__ 61 ("xchgl %0, %1" 62 : "=r"(r), "=m"(lockword_) 63 : "0"(1), "m"(lockword_) 64 : "memory"); 65 #else 66 volatile unsigned int *lockword_ptr = &lockword_; 67 __asm__ __volatile__ 68 ("1: lwarx %0, 0, %1\n\t" 69 "stwcx. 
%2, 0, %1\n\t" 70 "bne- 1b\n\t" 71 "isync" 72 : "=&r" (r), "=r" (lockword_ptr) 73 : "r" (1), "1" (lockword_ptr) 74 : "memory"); 75 #endif 76 #elif COMPILER(MSVC) 77 __asm { 78 mov eax, this ; store &lockword_ (which is this+0) in eax 79 mov ebx, 1 ; store 1 in ebx 80 xchg [eax], ebx ; exchange lockword_ and 1 81 mov r, ebx ; store old value of lockword_ in r 54 void Unlock() { 55 WTF::memoryBarrierBeforeUnlock(); 56 lockword_ = 0; 82 57 } 83 #endif84 if (r) TCMalloc_SlowLock(&lockword_);85 }86 58 87 inline void Unlock() {88 #if COMPILER(GCC)89 #if CPU(X86) || CPU(X86_64)90 __asm__ __volatile__91 ("movl $0, %0"92 : "=m"(lockword_)93 : "m" (lockword_)94 : "memory");95 #else96 __asm__ __volatile__97 ("isync\n\t"98 "eieio\n\t"99 "stw %1, %0"100 #if OS(DARWIN) || CPU(PPC)101 : "=o" (lockword_)102 #else103 : "=m" (lockword_)104 #endif105 : "r" (0)106 : "memory");107 #endif108 #elif COMPILER(MSVC)109 __asm {110 mov eax, this ; store &lockword_ (which is this+0) in eax111 mov [eax], 0 ; set lockword_ to 0112 }113 #endif114 }115 59 // Report if we think the lock can be held by this thread. 116 60 // When the lock is truly held by the invoking thread 117 61 // we will always return true. 
118 62 // Indended to be used as CHECK(lock.IsHeld()); 119 inlinebool IsHeld() const {63 bool IsHeld() const { 120 64 return lockword_ != 0; 121 65 } 122 66 123 inlinevoid Init() { lockword_ = 0; }124 inlinevoid Finalize() { }67 void Init() { lockword_ = 0; } 68 void Finalize() { } 125 69 126 volatile unsigned intlockword_;70 unsigned lockword_; 127 71 }; 128 72 129 73 #define SPINLOCK_INITIALIZER { 0 } 130 74 131 static void TCMalloc_SlowLock(volatile unsigned int* lockword) { 132 while (true) { 133 // Yield immediately since fast path failed 75 static void TCMalloc_SlowLock(unsigned* lockword) { 76 do { 134 77 #if OS(WINDOWS) 135 78 Sleep(0); … … 137 80 sched_yield(); 138 81 #endif 139 140 int r; 141 #if COMPILER(GCC) 142 #if CPU(X86) || CPU(X86_64) 143 __asm__ __volatile__ 144 ("xchgl %0, %1" 145 : "=r"(r), "=m"(*lockword) 146 : "0"(1), "m"(*lockword) 147 : "memory"); 148 149 #else 150 int tmp = 1; 151 __asm__ __volatile__ 152 ("1: lwarx %0, 0, %1\n\t" 153 "stwcx. %2, 0, %1\n\t" 154 "bne- 1b\n\t" 155 "isync" 156 : "=&r" (r), "=r" (lockword) 157 : "r" (tmp), "1" (lockword) 158 : "memory"); 159 #endif 160 #elif COMPILER(MSVC) 161 __asm { 162 mov eax, lockword ; assign lockword into eax 163 mov ebx, 1 ; assign 1 into ebx 164 xchg [eax], ebx ; exchange *lockword and 1 165 mov r, ebx ; store old value of *lockword in r 166 } 167 #endif 168 if (!r) { 169 return; 170 } 171 } 172 } 173 174 #elif OS(WINDOWS) 175 176 #ifndef WIN32_LEAN_AND_MEAN 177 #define WIN32_LEAN_AND_MEAN 178 #endif 179 #include <windows.h> 180 181 static void TCMalloc_SlowLock(LPLONG lockword); 182 183 // The following is a struct so that it can be initialized at compile time 184 struct TCMalloc_SpinLock { 185 186 inline void Lock() { 187 if (InterlockedExchange(&m_lockword, 1)) 188 TCMalloc_SlowLock(&m_lockword); 189 } 190 191 inline void Unlock() { 192 InterlockedExchange(&m_lockword, 0); 193 } 194 195 inline bool IsHeld() const { 196 return m_lockword != 0; 197 } 198 199 inline void Init() { 
m_lockword = 0; } 200 inline void Finalize() { } 201 202 LONG m_lockword; 203 }; 204 205 #define SPINLOCK_INITIALIZER { 0 } 206 207 static void TCMalloc_SlowLock(LPLONG lockword) { 208 Sleep(0); // Yield immediately since fast path failed 209 while (InterlockedExchange(lockword, 1)) 210 Sleep(2); 82 } while (!WTF::weakCompareAndSwap(lockword, 0, 1)); 211 83 } 212 84
Note: See TracChangeset
for help on using the changeset viewer.