Diffstat (limited to 'cpp/src/IceUtil/Shared.cpp')
-rw-r--r-- | cpp/src/IceUtil/Shared.cpp | 94
1 file changed, 78 insertions, 16 deletions
diff --git a/cpp/src/IceUtil/Shared.cpp b/cpp/src/IceUtil/Shared.cpp
index f18b9385798..1121ab3c823 100644
--- a/cpp/src/IceUtil/Shared.cpp
+++ b/cpp/src/IceUtil/Shared.cpp
@@ -10,7 +10,79 @@
 #include <IceUtil/Shared.h>
 
 using namespace IceUtil;
-using namespace IceUtilInternal;
+
+#ifdef ICE_HAS_ATOMIC_FUNCTIONS
+
+namespace IceUtilInternal
+{
+
+//
+// Linux only. Unfortunately, asm/atomic.h builds non-SMP safe code
+// with non-SMP kernels. This means that executables compiled with a
+// non-SMP kernel would fail randomly due to concurrency errors with
+// reference counting on SMP hosts. Therefore the relevant pieces of
+// atomic.h are more-or-less duplicated here.
+//
+
+/**
+ * atomicInc - atomically increment the counter
+ * @counter: pointer to the counter (volatile int*)
+ *
+ * Atomically increments @counter by 1. Note that the guaranteed
+ * useful range of the counter is only 24 bits.
+ *
+ * Inlined because this operation is performance critical.
+ */
+static inline void atomicInc(volatile int* counter)
+{
+    __asm__ __volatile__(
+        "lock ; incl %0"
+        :"=m" (*counter)
+        :"m" (*counter));
+}
+
+/**
+ * atomicDecAndTest - atomically decrement the counter and test
+ * @counter: pointer to the counter (volatile int*)
+ *
+ * Atomically decrements @counter by 1 and returns true if the result
+ * is 0, or false for all other cases. Note that the guaranteed useful
+ * range of the counter is only 24 bits.
+ *
+ * Inlined because this operation is performance critical.
+ */
+static inline int atomicDecAndTest(volatile int* counter)
+{
+    unsigned char c;
+    __asm__ __volatile__(
+        "lock ; decl %0; sete %1"
+        :"=m" (*counter), "=qm" (c)
+        :"m" (*counter) : "memory");
+    return c != 0;
+}
+
+/**
+ * atomicExchangeAdd - atomically add @i to the counter and return
+ * the updated value (same role as Win32 InterlockedExchangeAdd);
+ * derived from similar code in /usr/include/asm/rwsem.h, not atomic.h.
+ *
+ * Inlined because this operation is performance critical.
+ */
+static inline int atomicExchangeAdd(volatile int* counter, int i)
+{
+    int tmp = i;
+    __asm__ __volatile__(
+        "lock ; xadd %0,(%2)"
+        :"+r"(tmp), "=m"(*counter)
+        :"r"(counter), "m"(*counter)
+        : "memory");
+    return tmp + i;
+}
+
+}
+
+#endif
+
 IceUtil::SimpleShared::SimpleShared() :
     _ref(0),
@@ -25,25 +97,15 @@ IceUtil::SimpleShared::SimpleShared(const SimpleShared&) :
 }
 
 IceUtil::Shared::Shared() :
-#ifndef ICE_HAS_ATOMIC_FUNCTIONS
     _ref(0),
-#endif
     _noDelete(false)
 {
-#ifdef ICE_HAS_ATOMIC_FUNCTIONS
-    _ref.atomicSet(0);
-#endif
 }
 
 IceUtil::Shared::Shared(const Shared&) :
-#ifndef ICE_HAS_ATOMIC_FUNCTIONS
     _ref(0),
-#endif
     _noDelete(false)
 {
-#ifdef ICE_HAS_ATOMIC_FUNCTIONS
-    _ref.atomicSet(0);
-#endif
 }
 
 void
@@ -53,8 +115,8 @@ IceUtil::Shared::__incRef()
     assert(InterlockedExchangeAdd(&_ref, 0) >= 0);
     InterlockedIncrement(&_ref);
 #elif defined(ICE_HAS_ATOMIC_FUNCTIONS)
-    assert(_ref.atomicExchangeAdd(&_ref) >= 0);
-    _ref.atomicInc();
+    assert(IceUtilInternal::atomicExchangeAdd(&_ref, 0) >= 0);
+    IceUtilInternal::atomicInc(&_ref);
 #else
     _mutex.lock();
     assert(_ref >= 0);
@@ -74,8 +136,8 @@ IceUtil::Shared::__decRef()
         delete this;
     }
 #elif defined(ICE_HAS_ATOMIC_FUNCTIONS)
-    assert(_ref.atomicExchangeAdd(0) > 0);
-    if(_ref.atomicDecAndTest() && !_noDelete)
+    assert(IceUtilInternal::atomicExchangeAdd(&_ref, 0) > 0);
+    if(IceUtilInternal::atomicDecAndTest(&_ref) && !_noDelete)
     {
         _noDelete = true;
         delete this;
     }
@@ -103,7 +165,7 @@ IceUtil::Shared::__getRef() const
 #if defined(_WIN32)
     return InterlockedExchangeAdd(const_cast<LONG*>(&_ref), 0);
 #elif defined(ICE_HAS_ATOMIC_FUNCTIONS)
-    return const_cast<IceUtilInternal::AtomicCounter*>(&_ref)->atomicExchangeAdd(0);
+    return IceUtilInternal::atomicExchangeAdd(const_cast<volatile int*>(&_ref), 0);
 #else
     _mutex.lock();
    int ref = _ref;
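As an aside for readers of this commit: the hand-rolled x86 assembly above predates C++11. The sketch below shows the same reference-counting scheme written against std::atomic; it is not part of this change, and the class and method names (RefCounted, incRef, decRef, getRef) are placeholders chosen to mirror Shared.

#include <atomic>

// Sketch only: mirrors Shared's _ref handling with std::atomic<int>.
class RefCounted
{
public:
    RefCounted() : _ref(0), _noDelete(false) { }
    virtual ~RefCounted() { }

    void incRef()
    {
        // Same effect as atomicInc(&_ref): an atomic increment.
        _ref.fetch_add(1, std::memory_order_relaxed);
    }

    void decRef()
    {
        // Same effect as atomicDecAndTest(&_ref): fetch_sub returns the
        // previous value, so only the thread that drops the count from
        // 1 to 0 deletes the object.
        if(_ref.fetch_sub(1, std::memory_order_acq_rel) == 1 && !_noDelete)
        {
            _noDelete = true;
            delete this;
        }
    }

    int getRef() const
    {
        // Same effect as atomicExchangeAdd(&_ref, 0): an atomic read.
        return _ref.load(std::memory_order_relaxed);
    }

private:
    std::atomic<int> _ref;
    bool _noDelete;
};

The acquire-release ordering on the decrement is the important detail: it ensures that all writes made while a reference was held are visible to the thread that performs the final decrement and deletes the object, which is the same guarantee the "memory" clobber provides for the lock ; decl sequence above.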