Changeset 120356 in webkit


Timestamp: Jun 14, 2012 1:18:23 PM (12 years ago)
Author: ggaren@apple.com
Message:

ARMv7 should support spinlocks
https://bugs.webkit.org/show_bug.cgi?id=88957

Reviewed by Darin Adler.

More info @ http://infocenter.arm.com/help/topic/
com.arm.doc.genc007826/Barrier_Litmus_Tests_and_Cookbook_A08.pdf

  • wtf/Atomics.h:
    (WTF::memoryBarrierAfterLock):
    (WTF::memoryBarrierBeforeUnlock): Added memory barrier primitives since
    ARMv7 has a weakly ordered memory model.

  • wtf/Platform.h: Enabled compare-and-swap on Windows so our spinlock
    implementation would continue working on Windows.

  • wtf/TCSpinLock.h:
    (TCMalloc_SpinLock::Lock):
    (TCMalloc_SpinLock::Unlock):
    (TCMalloc_SpinLock): Use our compare-and-swap helper function to avoid
    rewriting it in assembly here.

    Added memory barriers since ARMv7 needs them.

    Removed PPC support because our helper function doesn't support PPC.
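
For readers new to weakly ordered CPUs, the sketch below (illustrative only; the names are made up and not from this patch) shows the reordering hazard the new barriers guard against on ARMv7:

    // Illustrative only: two threads communicating through a spinlock-protected value.
    static volatile unsigned lockword = 0; // 0 = free, 1 = held
    static volatile int sharedData = 0;

    void writer()
    {
        // ... lockword acquired via compare-and-swap ...
        sharedData = 42;  // (1) store inside the critical section
        lockword = 0;     // (2) release the lock
        // On ARMv7, (2) may become visible to another CPU before (1) unless a
        // "dmb" separates them; hence memoryBarrierBeforeUnlock() before (2).
    }

    void reader()
    {
        while (lockword) { }    // (3) observe the lock as free
        int copy = sharedData;  // (4) without a "dmb" between (3) and (4), this
                                // load may still see the old value; hence
                                // memoryBarrierAfterLock() after acquiring.
        (void)copy;
    }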

Location: trunk/Source/WTF
Files: 4 edited

  • trunk/Source/WTF/ChangeLog (r120256 → r120356)

    +2012-06-14  Geoffrey Garen  <ggaren@apple.com>
    +
    +        ARMv7 should support spinlocks
    +        https://bugs.webkit.org/show_bug.cgi?id=88957
    +
    +        Reviewed by Darin Adler.
    +
    +        More info @ http://infocenter.arm.com/help/topic/
    +        com.arm.doc.genc007826/Barrier_Litmus_Tests_and_Cookbook_A08.pdf
    +
    +        * wtf/Atomics.h:
    +        (WTF::memoryBarrierAfterLock):
    +        (WTF::memoryBarrierBeforeUnlock): Added memory barrier primitives since
    +        ARMv7 has a weakly ordered memory model.
    +
    +        * wtf/Platform.h: Enabled compare-and-swap on Windows so our spinlock
    +        implementation would continue working on Windows.
    +
    +        * wtf/TCSpinLock.h:
    +        (TCMalloc_SpinLock::Lock):
    +        (TCMalloc_SpinLock::Unlock):
    +        (TCMalloc_SpinLock): Use our compare-and-swap helper function to avoid
    +        rewriting it in assembly here.
    +
    +        Added memory barriers since ARMv7 needs them.
    +
    +        Removed PPC support because our helper function doesn't support PPC.
    +
     2012-06-13  Arnaud Renevier  <arno@renevier.net>
     
  • trunk/Source/WTF/wtf/Atomics.h (r118922 → r120356)

     }
     
    +#if CPU(ARM_THUMB2)
    +
    +inline void memoryBarrierAfterLock()
    +{
    +    asm volatile("dmb" ::: "memory");
    +}
    +
    +inline void memoryBarrierBeforeUnlock()
    +{
    +    asm volatile("dmb" ::: "memory");
    +}
    +
    +#else
    +
    +inline void memoryBarrierAfterLock() { }
    +inline void memoryBarrierBeforeUnlock() { }
    +
    +#endif
    +
     } // namespace WTF
     
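
As a rough orientation, the new primitives act as fences around the critical section. The sketch below maps them onto C++11 <atomic> fences for comparison only; it is not code from this patch, and ARM's dmb is a full barrier, strictly stronger than the acquire/release fences shown:

    // Approximate C++11 analogue of the new WTF primitives (illustrative only).
    #include <atomic>

    inline void memoryBarrierAfterLockAnalogue()
    {
        // After taking the lock: later loads/stores must not be reordered
        // before the lock acquisition.
        std::atomic_thread_fence(std::memory_order_acquire);
    }

    inline void memoryBarrierBeforeUnlockAnalogue()
    {
        // Before releasing the lock: earlier loads/stores must not be
        // reordered past the store that releases the lock.
        std::atomic_thread_fence(std::memory_order_release);
    }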
  • trunk/Source/WTF/wtf/Platform.h (r119633 → r120356)

     #endif
     
    -#if !defined(ENABLE_COMPARE_AND_SWAP) && COMPILER(GCC) && (CPU(X86) || CPU(X86_64) || CPU(ARM_THUMB2))
    +#if !defined(ENABLE_COMPARE_AND_SWAP) && (OS(WINDOWS) || (COMPILER(GCC) && (CPU(X86) || CPU(X86_64) || CPU(ARM_THUMB2))))
     #define ENABLE_COMPARE_AND_SWAP 1
     #endif
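
With OS(WINDOWS) added to the condition, ENABLE_COMPARE_AND_SWAP is now on for all Windows builds, not just the GCC targets. The sketch below shows one plausible shape of a Windows compare-and-swap helper built on InterlockedCompareExchange; it is an illustration, not the literal wtf/Atomics.h implementation:

    // Hypothetical sketch of a Windows compare-and-swap helper; the real
    // WTF::weakCompareAndSwap lives in wtf/Atomics.h and may differ.
    #include <windows.h>

    inline bool weakCompareAndSwapSketch(volatile unsigned* location, unsigned expected, unsigned newValue)
    {
        // InterlockedCompareExchange returns the previous value at *location;
        // the swap took effect only if that previous value equals 'expected'.
        return InterlockedCompareExchange(reinterpret_cast<volatile LONG*>(location),
                                          static_cast<LONG>(newValue),
                                          static_cast<LONG>(expected)) == static_cast<LONG>(expected);
    }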
  • trunk/Source/WTF/wtf/TCSpinLock.h (r117478 → r120356)

     #define TCMALLOC_INTERNAL_SPINLOCK_H__
     
    -#if (CPU(X86) || CPU(X86_64) || CPU(PPC)) && (COMPILER(GCC) || COMPILER(MSVC))
    -
    -#include <stdint.h>
    -#include <time.h>       /* For nanosleep() */
    -
    -#if OS(WINDOWS)
    -#ifndef WIN32_LEAN_AND_MEAN
    -#define WIN32_LEAN_AND_MEAN
    -#endif
    -#include <windows.h>
    -#else
    -#include <sched.h>      /* For sched_yield() */
    +#include <wtf/Atomics.h>
    +#if OS(UNIX)
    +#include <sched.h>
     #endif
     
    -static void TCMalloc_SlowLock(volatile unsigned int* lockword);
    +#if ENABLE(COMPARE_AND_SWAP)
    +
    +static void TCMalloc_SlowLock(unsigned* lockword);
     
     // The following is a struct so that it can be initialized at compile time
     struct TCMalloc_SpinLock {
    +    void Lock() {
    +      if (!WTF::weakCompareAndSwap(&lockword_, 0, 1))
    +        TCMalloc_SlowLock(&lockword_);
    +      WTF::memoryBarrierAfterLock();
    +    }
     
    -  inline void Lock() {
    -    int r;
    -#if COMPILER(GCC)
    -#if CPU(X86) || CPU(X86_64)
    -    __asm__ __volatile__
    -      ("xchgl %0, %1"
    -       : "=r"(r), "=m"(lockword_)
    -       : "0"(1), "m"(lockword_)
    -       : "memory");
    -#else
    -    volatile unsigned int *lockword_ptr = &lockword_;
    -    __asm__ __volatile__
    -        ("1: lwarx %0, 0, %1\n\t"
    -         "stwcx. %2, 0, %1\n\t"
    -         "bne- 1b\n\t"
    -         "isync"
    -         : "=&r" (r), "=r" (lockword_ptr)
    -         : "r" (1), "1" (lockword_ptr)
    -         : "memory");
    -#endif
    -#elif COMPILER(MSVC)
    -    __asm {
    -        mov eax, this    ; store &lockword_ (which is this+0) in eax
    -        mov ebx, 1       ; store 1 in ebx
    -        xchg [eax], ebx  ; exchange lockword_ and 1
    -        mov r, ebx       ; store old value of lockword_ in r
    +    void Unlock() {
    +      WTF::memoryBarrierBeforeUnlock();
    +      lockword_ = 0;
         }
    -#endif
    -    if (r) TCMalloc_SlowLock(&lockword_);
    -  }
     
    -  inline void Unlock() {
    -#if COMPILER(GCC)
    -#if CPU(X86) || CPU(X86_64)
    -    __asm__ __volatile__
    -      ("movl $0, %0"
    -       : "=m"(lockword_)
    -       : "m" (lockword_)
    -       : "memory");
    -#else
    -    __asm__ __volatile__
    -      ("isync\n\t"
    -       "eieio\n\t"
    -       "stw %1, %0"
    -#if OS(DARWIN) || CPU(PPC)
    -       : "=o" (lockword_)
    -#else
    -       : "=m" (lockword_)
    -#endif
    -       : "r" (0)
    -       : "memory");
    -#endif
    -#elif COMPILER(MSVC)
    -      __asm {
    -          mov eax, this  ; store &lockword_ (which is this+0) in eax
    -          mov [eax], 0   ; set lockword_ to 0
    -      }
    -#endif
    -  }
         // Report if we think the lock can be held by this thread.
         // When the lock is truly held by the invoking thread
         // we will always return true.
         // Indended to be used as CHECK(lock.IsHeld());
    -    inline bool IsHeld() const {
    +    bool IsHeld() const {
             return lockword_ != 0;
         }
     
    -    inline void Init() { lockword_ = 0; }
    -    inline void Finalize() { }
    +    void Init() { lockword_ = 0; }
    +    void Finalize() { }
     
    -    volatile unsigned int lockword_;
    +    unsigned lockword_;
     };
     
     #define SPINLOCK_INITIALIZER { 0 }
     
    -static void TCMalloc_SlowLock(volatile unsigned int* lockword) {
    -  while (true) {
    -    // Yield immediately since fast path failed
    +static void TCMalloc_SlowLock(unsigned* lockword) {
    +  do {
     #if OS(WINDOWS)
         Sleep(0);
     #else
         sched_yield();
     #endif
    -
    -    int r;
    -#if COMPILER(GCC)
    -#if CPU(X86) || CPU(X86_64)
    -    __asm__ __volatile__
    -      ("xchgl %0, %1"
    -       : "=r"(r), "=m"(*lockword)
    -       : "0"(1), "m"(*lockword)
    -       : "memory");
    -
    -#else
    -    int tmp = 1;
    -    __asm__ __volatile__
    -        ("1: lwarx %0, 0, %1\n\t"
    -         "stwcx. %2, 0, %1\n\t"
    -         "bne- 1b\n\t"
    -         "isync"
    -         : "=&r" (r), "=r" (lockword)
    -         : "r" (tmp), "1" (lockword)
    -         : "memory");
    -#endif
    -#elif COMPILER(MSVC)
    -    __asm {
    -        mov eax, lockword     ; assign lockword into eax
    -        mov ebx, 1            ; assign 1 into ebx
    -        xchg [eax], ebx       ; exchange *lockword and 1
    -        mov r, ebx            ; store old value of *lockword in r
    -    }
    -#endif
    -    if (!r) {
    -      return;
    -    }
    -  }
    -}
    -
    -#elif OS(WINDOWS)
    -
    -#ifndef WIN32_LEAN_AND_MEAN
    -#define WIN32_LEAN_AND_MEAN
    -#endif
    -#include <windows.h>
    -
    -static void TCMalloc_SlowLock(LPLONG lockword);
    -
    -// The following is a struct so that it can be initialized at compile time
    -struct TCMalloc_SpinLock {
    -
    -    inline void Lock() {
    -        if (InterlockedExchange(&m_lockword, 1))
    -            TCMalloc_SlowLock(&m_lockword);
    -    }
    -
    -    inline void Unlock() {
    -        InterlockedExchange(&m_lockword, 0);
    -    }
    -
    -    inline bool IsHeld() const {
    -        return m_lockword != 0;
    -    }
    -
    -    inline void Init() { m_lockword = 0; }
    -    inline void Finalize() { }
    -
    -    LONG m_lockword;
    -};
    -
    -#define SPINLOCK_INITIALIZER { 0 }
    -
    -static void TCMalloc_SlowLock(LPLONG lockword) {
    -    Sleep(0);        // Yield immediately since fast path failed
    -    while (InterlockedExchange(lockword, 1))
    -        Sleep(2);
    +  } while (!WTF::weakCompareAndSwap(lockword, 0, 1));
     }
     
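
To make the net effect concrete, here is a minimal usage sketch of the rewritten lock; the lock name and the guarded work are invented for illustration, and it assumes a platform where ENABLE(COMPARE_AND_SWAP) is set:

    // Illustrative only: how TCMalloc-style code uses the spinlock.
    #include "TCSpinLock.h"

    static TCMalloc_SpinLock exampleLock = SPINLOCK_INITIALIZER; // hypothetical lock

    static void mutateSharedState()
    {
        exampleLock.Lock();    // weakCompareAndSwap fast path; TCMalloc_SlowLock yields and retries on contention
        // ... touch state shared between threads; the barrier after Lock() and
        // before Unlock() keeps these accesses inside the critical section on ARMv7 ...
        exampleLock.Unlock();  // memoryBarrierBeforeUnlock(), then lockword_ = 0
    }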