summary refs log tree commit diff stats
path: root/include
diff options
context:
space:
mode:
Diffstat (limited to 'include')
-rw-r--r--  include/os/freebsd/spl/sys/atomic.h  136
1 file changed, 65 insertions, 71 deletions
diff --git a/include/os/freebsd/spl/sys/atomic.h b/include/os/freebsd/spl/sys/atomic.h
index e283c6c0e..4227e5f7d 100644
--- a/include/os/freebsd/spl/sys/atomic.h
+++ b/include/os/freebsd/spl/sys/atomic.h
@@ -32,90 +32,53 @@
#include <sys/types.h>
#include <machine/atomic.h>
-#define casptr(_a, _b, _c) \
- atomic_cmpset_ptr((volatile uintptr_t *)(_a), \
- (uintptr_t)(_b), \
- (uintptr_t)(_c))
-#define cas32 atomic_cmpset_32
-#define atomic_sub_64 atomic_subtract_64
-
-#if defined(__i386__) || defined(KLD_MODULE)
+#define atomic_sub_64 atomic_subtract_64
+
+#if defined(__i386__) && (defined(_KERNEL) || defined(KLD_MODULE))
#define I386_HAVE_ATOMIC64
#endif
+#if defined(__i386__) || defined(__amd64__) || defined(__arm__)
+/* No spurious failures from fcmpset. */
+#define STRONG_FCMPSET
+#endif
+
#if !defined(__LP64__) && !defined(__mips_n32) && \
- !defined(ARM_HAVE_ATOMIC64) && !defined(I386_HAVE_ATOMIC64)
+ !defined(ARM_HAVE_ATOMIC64) && !defined(I386_HAVE_ATOMIC64) && \
+ !defined(HAS_EMULATED_ATOMIC64)
extern void atomic_add_64(volatile uint64_t *target, int64_t delta);
extern void atomic_dec_64(volatile uint64_t *target);
+extern uint64_t atomic_swap_64(volatile uint64_t *a, uint64_t value);
+extern uint64_t atomic_load_64(volatile uint64_t *a);
+extern uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta);
+extern uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
+ uint64_t newval);
#endif
-#ifndef __sparc64__
-#if defined(__LP64__) || defined(__mips_n32) || \
- defined(ARM_HAVE_ATOMIC64) || defined(I386_HAVE_ATOMIC64)
-
-#define membar_producer() wmb()
-
-static __inline uint64_t
-atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t newval)
-{
-#ifdef __i386__
- atomic_fcmpset_64(target, &cmp, newval);
-#else
- atomic_fcmpset_long(target, &cmp, newval);
-#endif
- return (cmp);
-}
+#define membar_producer atomic_thread_fence_rel
static __inline uint32_t
-atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t newval)
+atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
{
-
- atomic_fcmpset_int(target, &cmp, newval);
- return (cmp);
+ return (atomic_fetchadd_32(target, delta) + delta);
}
-static __inline uint64_t
-atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
+static __inline uint_t
+atomic_add_int_nv(volatile uint_t *target, int delta)
{
- uint64_t prev;
-
- prev = atomic_fetchadd_long(target, delta);
-
- return (prev + delta);
+ return (atomic_add_32_nv(target, delta));
}
-#else
-extern uint32_t atomic_cas_32(volatile uint32_t *target, uint32_t cmp,
- uint32_t newval);
-extern uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
- uint64_t newval);
-extern uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta);
-extern void membar_producer(void);
-#endif
-#endif
-extern uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t value);
-
-#if defined(__sparc64__) || defined(__powerpc__) || defined(__arm__) || \
- defined(__mips__) || defined(__aarch64__) || defined(__riscv)
-extern void atomic_or_8(volatile uint8_t *target, uint8_t value);
-#else
static __inline void
-atomic_or_8(volatile uint8_t *target, uint8_t value)
-{
- atomic_set_8(target, value);
-}
-#endif
-
-static __inline uint32_t
-atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
+atomic_inc_32(volatile uint32_t *target)
{
- return (atomic_fetchadd_32(target, delta) + delta);
+ atomic_add_32(target, 1);
}
static __inline uint32_t
-atomic_add_int_nv(volatile uint32_t *target, int delta)
+atomic_inc_32_nv(volatile uint32_t *target)
{
- return (atomic_add_32_nv(target, delta));
+ return (atomic_add_32_nv(target, 1));
}
static __inline void
@@ -127,29 +90,60 @@ atomic_dec_32(volatile uint32_t *target)
static __inline uint32_t
atomic_dec_32_nv(volatile uint32_t *target)
{
- return (atomic_fetchadd_32(target, -1) - 1);
+ return (atomic_add_32_nv(target, -1));
+}
+
+#ifndef __sparc64__
+static inline uint32_t
+atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t newval)
+{
+#ifdef STRONG_FCMPSET
+ (void) atomic_fcmpset_32(target, &cmp, newval);
+#else
+ uint32_t expected = cmp;
+
+ do {
+ if (atomic_fcmpset_32(target, &cmp, newval))
+ break;
+ } while (cmp == expected);
+#endif
+ return (cmp);
}
+#endif
#if defined(__LP64__) || defined(__mips_n32) || \
- defined(ARM_HAVE_ATOMIC64) || defined(I386_HAVE_ATOMIC64)
+ defined(ARM_HAVE_ATOMIC64) || defined(I386_HAVE_ATOMIC64) || \
+ defined(HAS_EMULATED_ATOMIC64)
static __inline void
atomic_dec_64(volatile uint64_t *target)
{
atomic_subtract_64(target, 1);
}
-#endif
-static __inline void
-atomic_inc_32(volatile uint32_t *target)
+static inline uint64_t
+atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
{
- atomic_add_32(target, 1);
+ return (atomic_fetchadd_64(target, delta) + delta);
}
-static __inline uint32_t
-atomic_inc_32_nv(volatile uint32_t *target)
+#ifndef __sparc64__
+static inline uint64_t
+atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t newval)
{
- return (atomic_add_32_nv(target, 1));
+#ifdef STRONG_FCMPSET
+ (void) atomic_fcmpset_64(target, &cmp, newval);
+#else
+ uint64_t expected = cmp;
+
+ do {
+ if (atomic_fcmpset_64(target, &cmp, newval))
+ break;
+ } while (cmp == expected);
+#endif
+ return (cmp);
}
+#endif
+#endif
static __inline void
atomic_inc_64(volatile uint64_t *target)