Add atomic_swap_32() and atomic_swap_64()
The atomic_swap_32() function maps to atomic_xchg(), and the
atomic_swap_64() function maps to atomic64_xchg().

Signed-off-by: Tim Chase <tim@chase2k.com>
Signed-off-by: Brian Behlendorf <behlendorf1@llnl.gov>
Closes #377
commit 2bf35fb754
parent 7f23e00109
@@ -156,6 +156,19 @@ atomic_cas_32(volatile uint32_t *target, uint32_t cmp,
 	return rc;
 }
 
+static __inline__ uint32_t
+atomic_swap_32(volatile uint32_t *target, uint32_t newval)
+{
+	uint32_t rc;
+
+	spin_lock(&atomic32_lock);
+	rc = *target;
+	*target = newval;
+	spin_unlock(&atomic32_lock);
+
+	return rc;
+}
+
 static __inline__ void
 atomic_inc_64(volatile uint64_t *target)
 {
@@ -253,6 +266,18 @@ atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
 	return rc;
 }
 
+static __inline__ uint64_t
+atomic_swap_64(volatile uint64_t *target, uint64_t newval)
+{
+	uint64_t rc;
+
+	spin_lock(&atomic64_lock);
+	rc = *target;
+	*target = newval;
+	spin_unlock(&atomic64_lock);
+
+	return rc;
+}
 
 #else /* ATOMIC_SPINLOCK */
 
@@ -265,6 +290,7 @@ atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
 #define atomic_add_32_nv(v, i)	atomic_add_return((i), (atomic_t *)(v))
 #define atomic_sub_32_nv(v, i)	atomic_sub_return((i), (atomic_t *)(v))
 #define atomic_cas_32(v, x, y)	atomic_cmpxchg((atomic_t *)(v), x, y)
+#define atomic_swap_32(v, x)	atomic_xchg((atomic_t *)(v), x)
 #define atomic_inc_64(v)	atomic64_inc((atomic64_t *)(v))
 #define atomic_dec_64(v)	atomic64_dec((atomic64_t *)(v))
 #define atomic_add_64(v, i)	atomic64_add((i), (atomic64_t *)(v))
@@ -274,6 +300,7 @@ atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
 #define atomic_add_64_nv(v, i)	atomic64_add_return((i), (atomic64_t *)(v))
 #define atomic_sub_64_nv(v, i)	atomic64_sub_return((i), (atomic64_t *)(v))
 #define atomic_cas_64(v, x, y)	atomic64_cmpxchg((atomic64_t *)(v), x, y)
+#define atomic_swap_64(v, x)	atomic64_xchg((atomic64_t *)(v), x)
 
 #endif /* ATOMIC_SPINLOCK */
 
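For context (not part of the patch), here is a minimal usage sketch of the new primitives: atomic_swap_64() atomically stores a new value and returns the previous contents, which suits read-and-reset style counters. The pending_bytes counter and drain_pending_bytes() helper below are invented for this illustration, and the include assumes the SPL's <sys/atomic.h> header is on the include path.

#include <sys/atomic.h>

/* Hypothetical per-module counter; names invented for this sketch. */
static volatile uint64_t pending_bytes;

static uint64_t
drain_pending_bytes(void)
{
	/* Atomically fetch the current total and reset it to zero. */
	return (atomic_swap_64(&pending_bytes, 0));
}

On kernels where atomic64_xchg() is available the macro mapping above is used directly; otherwise the spinlock-protected fallback added in the earlier hunks provides the same return-the-old-value semantics.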