// --------------------------------------------------------------------------
// NOTE(review): extraction artifact -- the original file's line numbers
// (27, 31, 33, ...) are fused into the text below, and blank/brace-only lines
// (including the btSpinMutex method signatures) were dropped. The logical
// content is: the backend-selection #if/#elif ladder that picks an atomics
// implementation, then fragments of the C++11 <atomic> versions of
// btSpinMutex::tryLock() and btSpinMutex::unlock(). Restore from the
// upstream file before editing logic here.
27 #if __cplusplus >= 201103L 31 #define USE_CPP11_ATOMICS 1 33 #elif defined( _MSC_VER ) 36 #define USE_MSVC_INTRINSICS 1 38 #elif defined( __GNUC__ ) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)) 42 #define USE_GCC_BUILTIN_ATOMICS 1 44 #elif defined( __GNUC__ ) && (__GNUC__ == 4 && __GNUC_MINOR__ >= 1) 47 #define USE_GCC_BUILTIN_ATOMICS_OLD 1 57 #define THREAD_LOCAL_STATIC thread_local static 61 std::atomic<int>* aDest =
// tryLock() fragment: view the mLock int as a std::atomic<int> and attempt a
// weak CAS to 1 (acq_rel on success, acquire on failure). The declaration of
// 'expected' is not visible here -- presumably 'int expected = 0;' was
// dropped by the extraction; confirm against the upstream file.
reinterpret_cast<std::atomic<int>*
>(&
mLock);
63 return std::atomic_compare_exchange_weak_explicit( aDest, &expected,
int(1), std::memory_order_acq_rel, std::memory_order_acquire );
// unlock() fragment: release-store 0 into mLock.
77 std::atomic<int>* aDest =
reinterpret_cast<std::atomic<int>*
>(&
mLock);
78 std::atomic_store_explicit( aDest,
int(0), std::memory_order_release );
// --------------------------------------------------------------------------
// NOTE(review): extraction artifact (fused line numbers, dropped signatures).
// MSVC backend: tryLock()/unlock() fragments built on the _Interlocked*
// compiler intrinsics, plus the MSVC spelling of thread-local storage.
82 #elif USE_MSVC_INTRINSICS 84 #define WIN32_LEAN_AND_MEAN 89 #define THREAD_LOCAL_STATIC __declspec( thread ) static 94 volatile long* aDest =
// tryLock() fragment: full-barrier compare-exchange of mLock from 0 to 1;
// succeeds (returns true) only if the previous value was 0.
reinterpret_cast<long*
>(&
mLock);
95 return ( 0 == _InterlockedCompareExchange( aDest, 1, 0) );
// unlock() fragment: full-barrier exchange of mLock back to 0.
109 volatile long* aDest =
reinterpret_cast<long*
>( &
mLock );
110 _InterlockedExchange( aDest, 0 );
// --------------------------------------------------------------------------
// NOTE(review): extraction artifact (fused line numbers, dropped signatures).
// GCC >= 4.7 backend: tryLock()/unlock() fragments using the __atomic_*
// builtins with explicit memory-order arguments.
113 #elif USE_GCC_BUILTIN_ATOMICS 115 #define THREAD_LOCAL_STATIC static __thread 122 const int memOrderSuccess = __ATOMIC_ACQ_REL;
123 const int memOrderFail = __ATOMIC_ACQUIRE;
// tryLock() fragment: CAS mLock from 'expected' to 1. The declarations of
// 'expected' and 'weak' are not visible here -- presumably dropped by the
// extraction; confirm against the upstream file.
124 return __atomic_compare_exchange_n(&
mLock, &expected,
int(1), weak, memOrderSuccess, memOrderFail);
// unlock() fragment: release-store 0 into mLock.
138 __atomic_store_n(&
mLock,
int(0), __ATOMIC_RELEASE);
// --------------------------------------------------------------------------
// NOTE(review): extraction artifact (fused line numbers, dropped signatures).
// Legacy GCC 4.1-4.6 backend: tryLock()/unlock() fragments using the older
// full-barrier __sync_* builtins (no memory-order arguments available).
141 #elif USE_GCC_BUILTIN_ATOMICS_OLD 144 #define THREAD_LOCAL_STATIC static __thread 148 return __sync_bool_compare_and_swap(&
// tryLock() fragment: CAS mLock 0 -> 1, true iff the swap happened.
mLock,
int(0),
int(1));
// unlock() fragment: AND with 0 clears mLock atomically (full barrier).
163 __sync_fetch_and_and(&
mLock,
int(0));
// --------------------------------------------------------------------------
// NOTE(review): extraction artifact. This span holds: the #error fallback for
// unknown platforms, the #endif closing the backend ladder, and the
// ThreadsafeCounter struct whose braces and trailing statements were dropped.
166 #else //#elif USE_MSVC_INTRINSICS 168 #error "no threading primitives defined -- unknown platform" 170 #endif //#else //#elif USE_MSVC_INTRINSICS 173 struct ThreadsafeCounter
// Monotonically increasing counter used to hand out thread indices.
175 unsigned int mCounter;
178 ThreadsafeCounter() {mCounter=0;}
// getNext(): post-increments the counter and (presumably) returns the old
// value; the 'return val;' line -- and any mutex lock/unlock guarding the
// increment -- are not visible here. Confirm against the upstream file.
180 unsigned int getNext()
184 unsigned int val = mCounter++;
// File-scope counter shared by all threads requesting an index.
190 static ThreadsafeCounter gThreadCounter;
194 unsigned int btGetCurrentThreadIndex()
196 const unsigned int kNullIndex = ~0
U;
197 THREAD_LOCAL_STATIC
unsigned int sThreadIndex = kNullIndex;
198 if ( sThreadIndex == kNullIndex )
200 sThreadIndex = gThreadCounter.getNext();
205 bool btIsMainThread()
207 return btGetCurrentThreadIndex() == 0;
// --------------------------------------------------------------------------
// NOTE(review): extraction artifact (dropped method signatures/braces).
// Non-BT_THREADSAFE build: btSpinMutex::lock()/unlock()/tryLock() are stubs
// that assert if ever called -- the assert strings identify which method each
// fragment belongs to.
210 #else // #if BT_THREADSAFE 215 btAssert(!
"unimplemented btSpinMutex::lock() called");
220 btAssert(!
"unimplemented btSpinMutex::unlock() called");
225 btAssert(!
"unimplemented btSpinMutex::tryLock() called");
230 #endif // #if BT_THREADSAFE
btSpinMutex – lightweight spin-mutex implemented with atomic ops; it never puts a thread to sleep because it is designed for use with a task scheduler that runs one thread per CPU core, where threads busy-wait until they run out of tasks.