Diffstat (limited to 'cpp/src/IceUtil/Shared.cpp')
-rwxr-xr-x    cpp/src/IceUtil/Shared.cpp    90
1 file changed, 83 insertions, 7 deletions
diff --git a/cpp/src/IceUtil/Shared.cpp b/cpp/src/IceUtil/Shared.cpp
index 48823e3f21e..de10b77bc7b 100755
--- a/cpp/src/IceUtil/Shared.cpp
+++ b/cpp/src/IceUtil/Shared.cpp
@@ -9,7 +9,83 @@
#include <IceUtil/Shared.h>
+#ifdef ICE_HAS_ATOMIC_FUNCTIONS
+
+namespace IceUtilInternal
+{
+/*
+ * atomicSet - set the atomic counter variable
+ * @v: pointer of type AtomicCounter
+ * @i: required value
+ *
+ * Atomically sets the value of @v to @i. Note that the guaranteed
+ * useful range of an AtomicCounter is only 24 bits.
+ */
+inline void atomicSet(AtomicCounter* v, int i)
+{
+ v->counter = i;
+}
+
+/*
+ * atomicInc - increment the atomic counter variable
+ * @v: pointer of type AtomicCounter
+ *
+ * Atomically increments @v by 1. Note that the guaranteed useful
+ * range of an AtomicCounter is only 24 bits.
+ *
+ * Inlined because this operation is performance critical.
+ */
+inline void atomicInc(AtomicCounter *v)
+{
+ __asm__ __volatile__(
+ "lock ; incl %0"
+ :"=m" (v->counter)
+ :"m" (v->counter));
+}
+
+/**
+ * atomicDecAndTest - decrement and test
+ * @v: pointer of type AtomicCounter
+ *
+ * Atomically decrements @v by 1 and returns true if the result is 0,
+ * or false for all other cases. Note that the guaranteed useful
+ * range of an AtomicCounter is only 24 bits.
+ *
+ * Inlined because this operation is performance critical.
+ */
+inline int atomicDecAndTest(AtomicCounter *v)
+{
+ unsigned char c;
+ __asm__ __volatile__(
+ "lock ; decl %0; sete %1"
+ :"=m" (v->counter), "=qm" (c)
+ :"m" (v->counter) : "memory");
+ return c != 0;
+}
+
+/**
+ * atomicExchangeAdd - atomically add @i to @v and return the new value;
+ * used the same way as InterlockedExchangeAdd on Windows. Unlike the
+ * functions above, this did not come from atomic.h; the code was derived
+ * from similar code in /usr/include/asm/rwsem.h.
+ *
+ * Inlined because this operation is performance critical.
+ */
+inline int atomicExchangeAdd(int i, AtomicCounter* v)
+{
+ int tmp = i;
+ __asm__ __volatile__(
+ "lock ; xadd %0,(%2)"
+ :"+r"(tmp), "=m"(v->counter)
+ :"r"(v), "m"(v->counter)
+ : "memory");
+ return tmp + i;
+}
+}
+#endif
+
+
using namespace IceUtil;
+using namespace IceUtilInternal;
IceUtil::SimpleShared::SimpleShared() :
_ref(0),
_noDelete(false)
@@ -30,7 +106,7 @@ IceUtil::Shared::Shared() :
_ref(0),
_noDelete(false)
{
#ifdef ICE_HAS_ATOMIC_FUNCTIONS
- ice_atomic_set(&_ref, 0);
+ atomicSet(&_ref, 0);
#endif
}
@@ -41,7 +117,7 @@ IceUtil::Shared::Shared(const Shared&) :
_ref(0),
_noDelete(false)
{
#ifdef ICE_HAS_ATOMIC_FUNCTIONS
- ice_atomic_set(&_ref, 0);
+ atomicSet(&_ref, 0);
#endif
}
@@ -52,8 +128,8 @@ IceUtil::Shared::__incRef()
assert(InterlockedExchangeAdd(&_ref, 0) >= 0);
InterlockedIncrement(&_ref);
#elif defined(ICE_HAS_ATOMIC_FUNCTIONS)
- assert(ice_atomic_exchange_add(0, &_ref) >= 0);
- ice_atomic_inc(&_ref);
+ assert(atomicExchangeAdd(0, &_ref) >= 0);
+ atomicInc(&_ref);
#else
_mutex.lock();
assert(_ref >= 0);
@@ -73,8 +149,8 @@ IceUtil::Shared::__decRef()
delete this;
}
#elif defined(ICE_HAS_ATOMIC_FUNCTIONS)
- assert(ice_atomic_exchange_add(0, &_ref) > 0);
- if(ice_atomic_dec_and_test(&_ref) && !_noDelete)
+ assert(atomicExchangeAdd(0, &_ref) > 0);
+ if(atomicDecAndTest(&_ref) && !_noDelete)
{
_noDelete = true;
delete this;
@@ -102,7 +178,7 @@ IceUtil::Shared::__getRef() const
#if defined(_WIN32)
return InterlockedExchangeAdd(const_cast<LONG*>(&_ref), 0);
#elif defined(ICE_HAS_ATOMIC_FUNCTIONS)
- return ice_atomic_exchange_add(0, const_cast<ice_atomic_t*>(&_ref));
+ return atomicExchangeAdd(0, const_cast<AtomicCounter*>(&_ref));
#else
_mutex.lock();
int ref = _ref;
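
For comparison only, and not part of the change above: a minimal sketch of the same intrusive reference-counting pattern written with C++11 std::atomic instead of the inline assembly added in this commit. The class and member names here (RefCounted, _ref, _noDelete) are illustrative stand-ins rather than IceUtil identifiers, and the memory orderings are one reasonable choice, not necessarily what Ice uses.

#include <atomic>
#include <cassert>

class RefCounted
{
public:
    RefCounted() : _ref(0), _noDelete(false) {}
    virtual ~RefCounted() {}

    void incRef()
    {
        // Plays the role of atomicInc ("lock ; incl").
        int prev = _ref.fetch_add(1, std::memory_order_relaxed);
        assert(prev >= 0);
    }

    void decRef()
    {
        // Plays the role of atomicDecAndTest ("lock ; decl; sete"):
        // fetch_sub returns the previous value, so prev == 1 means the
        // counter just dropped to zero.
        int prev = _ref.fetch_sub(1, std::memory_order_acq_rel);
        assert(prev > 0);
        if(prev == 1 && !_noDelete)
        {
            _noDelete = true;
            delete this;
        }
    }

    int getRef() const
    {
        // Plays the role of atomicExchangeAdd(0, &_ref): read the current count.
        return _ref.load(std::memory_order_relaxed);
    }

private:
    std::atomic<int> _ref;
    bool _noDelete;
};

Usage mirrors Shared: callers invoke incRef() when taking a reference and decRef() when releasing it, and the object deletes itself once the count reaches zero unless _noDelete is set.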