path: root/lib/libspl
author	Alexander Motin <[email protected]>	2021-06-07 12:02:47 -0400
committer	Brian Behlendorf <[email protected]>	2021-06-09 13:05:34 -0700
commit	e76373de7b7384bb6e5c6fd5e04f15b54df20fb7 (patch)
tree	5390691ce26f6f3a115c3b0f0d21bbea544ee440 /lib/libspl
parent	b05ae1a82ab686f037806dbd932eb3cd5ce34c00 (diff)
More aggsum optimizations
- Avoid atomic_add() when updating as_lower_bound/as_upper_bound. The previous code was excessively strong on 64-bit systems while not strong enough on 32-bit ones. Instead, introduce and use real atomic_load() and atomic_store() operations: plain assignments on 64-bit machines, but proper atomics on 32-bit ones to avoid torn reads/writes.
- Reduce the number of buckets on large systems. Extra buckets do not improve add speed as much as they hurt reads, and unlike wmsum, reads still matter for aggsum.

Reviewed-by: Brian Behlendorf <[email protected]>
Reviewed-by: Ryan Moeller <[email protected]>
Signed-off-by: Alexander Motin <[email protected]>
Sponsored-By: iXsystems, Inc.
Closes #12145
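To make the first point concrete, here is a minimal, purely illustrative sketch (not the actual aggsum code; the field and function names are hypothetical). It assumes libspl's <atomic.h> from this patch is on the include path and that writers are already serialized externally (e.g. by a lock), so the only property needed from the new primitives is that 64-bit loads and stores are never torn on 32-bit machines; on LP64 builds they compile down to plain assignments.

/* Illustrative sketch only; names are hypothetical, not from the patch. */
#include <stdint.h>
#include <atomic.h>			/* lib/libspl/include/atomic.h */

static volatile uint64_t as_lower_bound;	/* simplified stand-in */

/*
 * Writer: assumed to run under an external lock, so a plain
 * load/modify/store replaces atomic_add(); atomic_store_64() only
 * has to guarantee the value is not written as two 32-bit halves.
 */
static void
lower_bound_add(uint64_t delta)
{
	atomic_store_64(&as_lower_bound,
	    atomic_load_64(&as_lower_bound) + delta);
}

/*
 * Reader: may run without the lock; atomic_load_64() guarantees it
 * never observes a half-written value on 32-bit machines.
 */
static uint64_t
lower_bound_read(void)
{
	return (atomic_load_64(&as_lower_bound));
}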
Diffstat (limited to 'lib/libspl')
-rw-r--r--	lib/libspl/asm-generic/atomic.c	13
-rw-r--r--	lib/libspl/include/atomic.h	43
2 files changed, 56 insertions(+), 0 deletions(-)
diff --git a/lib/libspl/asm-generic/atomic.c b/lib/libspl/asm-generic/atomic.c
index 35535ea49..504422b8e 100644
--- a/lib/libspl/asm-generic/atomic.c
+++ b/lib/libspl/asm-generic/atomic.c
@@ -390,6 +390,19 @@ atomic_swap_ptr(volatile void *target, void *bits)
return (old);
}
+#ifndef _LP64
+uint64_t
+atomic_load_64(volatile uint64_t *target)
+{
+ return (__atomic_load_n(target, __ATOMIC_RELAXED));
+}
+
+void
+atomic_store_64(volatile uint64_t *target, uint64_t bits)
+{
+ return (__atomic_store_n(target, bits, __ATOMIC_RELAXED));
+}
+#endif
int
atomic_set_long_excl(volatile ulong_t *target, uint_t value)
diff --git a/lib/libspl/include/atomic.h b/lib/libspl/include/atomic.h
index f8c257f96..8dd1d654a 100644
--- a/lib/libspl/include/atomic.h
+++ b/lib/libspl/include/atomic.h
@@ -246,6 +246,49 @@ extern uint64_t atomic_swap_64(volatile uint64_t *, uint64_t);
#endif
/*
+ * Atomically read variable.
+ */
+#define atomic_load_char(p) (*(volatile uchar_t *)(p))
+#define atomic_load_short(p) (*(volatile ushort_t *)(p))
+#define atomic_load_int(p) (*(volatile uint_t *)(p))
+#define atomic_load_long(p) (*(volatile ulong_t *)(p))
+#define atomic_load_ptr(p) (*(volatile __typeof(*p) *)(p))
+#define atomic_load_8(p) (*(volatile uint8_t *)(p))
+#define atomic_load_16(p) (*(volatile uint16_t *)(p))
+#define atomic_load_32(p) (*(volatile uint32_t *)(p))
+#ifdef _LP64
+#define atomic_load_64(p) (*(volatile uint64_t *)(p))
+#elif defined(_INT64_TYPE)
+extern uint64_t atomic_load_64(volatile uint64_t *);
+#endif
+
+/*
+ * Atomically write variable.
+ */
+#define atomic_store_char(p, v) \
+	(*(volatile uchar_t *)(p) = (uchar_t)(v))
+#define atomic_store_short(p, v) \
+	(*(volatile ushort_t *)(p) = (ushort_t)(v))
+#define atomic_store_int(p, v) \
+	(*(volatile uint_t *)(p) = (uint_t)(v))
+#define atomic_store_long(p, v) \
+	(*(volatile ulong_t *)(p) = (ulong_t)(v))
+#define atomic_store_ptr(p, v) \
+	(*(volatile __typeof(*p) *)(p) = (v))
+#define atomic_store_8(p, v) \
+	(*(volatile uint8_t *)(p) = (uint8_t)(v))
+#define atomic_store_16(p, v) \
+	(*(volatile uint16_t *)(p) = (uint16_t)(v))
+#define atomic_store_32(p, v) \
+	(*(volatile uint32_t *)(p) = (uint32_t)(v))
+#ifdef _LP64
+#define atomic_store_64(p, v) \
+	(*(volatile uint64_t *)(p) = (uint64_t)(v))
+#elif defined(_INT64_TYPE)
+extern void atomic_store_64(volatile uint64_t *, uint64_t);
+#endif
+
+/*
* Perform an exclusive atomic bit set/clear on a target.
* Returns 0 if bit was successfully set/cleared, or -1
* if the bit was already set/cleared.
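For reference, a minimal hypothetical consumer of the new typed accessors could look like the following (the struct and functions are invented for illustration; only the atomic_load_*/atomic_store_* names come from this header). On LP64 every accessor expands to a plain volatile access, and only the 64-bit pair falls back to the extern functions added in asm-generic/atomic.c on 32-bit builds.

/* Hypothetical usage example, not part of the patch. */
#include <stdint.h>
#include <atomic.h>			/* lib/libspl/include/atomic.h */

typedef struct node {
	struct node	*next;
	uint64_t	bytes;
	uint32_t	flags;
} node_t;

static void
node_publish(node_t *n, struct node *next, uint64_t nbytes)
{
	atomic_store_ptr(&n->next, next);	/* plain volatile store */
	atomic_store_32(&n->flags, 1);		/* plain volatile store */
	atomic_store_64(&n->bytes, nbytes);	/* real atomic on 32-bit */
}

static uint64_t
node_bytes(node_t *n)
{
	return (atomic_load_64(&n->bytes));	/* never a torn read */
}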