@@ -256,6 +256,31 @@ do { \
})
#endif
+/**
+ * smp_vcond_load_relaxed() - (Spin) wait until an expected masked value
+ * is observed at an address, with no ordering guarantees. Spins until
+ * `(*addr & mask) == val` or until @nsecs elapse, and returns the last
+ * observed value of *addr.
+ *
+ * @nsecs: timeout in nanoseconds
+ * @addr: pointer to an integer
+ * @mask: bit mask applied to values read from @addr
+ * @val: expected value of *@addr after applying @mask
+ */
+#ifndef smp_vcond_load_relaxed
+#define smp_vcond_load_relaxed(nsecs, addr, mask, val) ({		\
+	const u64 __start = local_clock_noinstr();			\
+	u64 __nsecs = (nsecs);						\
+	typeof(addr) __addr = (addr);					\
+	typeof(*__addr) __mask = (mask);				\
+	typeof(*__addr) __val = (val);					\
+	smp_cond_load_relaxed(__addr, (					\
+		(VAL & __mask) == __val ||				\
+		local_clock_noinstr() - __start > __nsecs		\
+	));								\
+})
+#endif
+
/**
* smp_cond_load_acquire() - (Spin) wait for cond with ACQUIRE ordering
* @ptr: pointer to the variable to wait on
Relaxed poll until the desired mask/value is observed at the specified
address, or until the timeout expires. This macro is a specialization of
the generic smp_cond_load_relaxed(): it takes a simple mask/value
condition (vcond) instead of an arbitrary expression, which allows
architectures to better specialize the implementation, e.g. to enable
wfe() polling of the address on arm.

Signed-off-by: Haris Okanovic <harisokn@amazon.com>
---
 include/asm-generic/barrier.h | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)
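
As an illustration only (not part of the patch), here is a minimal
sketch of a caller, assuming a hypothetical driver that waits for a
device-owned status word; MYDEV_READY, mydev_wait_ready(), and the 1 ms
budget are all invented for the example:

#include <linux/bits.h>
#include <linux/errno.h>
#include <linux/time64.h>

#define MYDEV_READY	BIT(0)	/* hypothetical "device ready" flag */

/* Wait up to 1 ms for the device to set MYDEV_READY in @status. */
static int mydev_wait_ready(u32 *status)
{
	u32 seen;

	/*
	 * Relaxed poll: no ordering is implied, so a caller that reads
	 * further device state after this must add its own acquire or
	 * smp_rmb() barrier.
	 */
	seen = smp_vcond_load_relaxed(NSEC_PER_MSEC, status,
				      MYDEV_READY, MYDEV_READY);

	return (seen & MYDEV_READY) ? 0 : -ETIMEDOUT;
}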
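
Likewise purely illustrative: the kind of architecture override the
commit message alludes to. The sketch below is modeled on arm64's
existing smp_cond_load_relaxed(), which waits on the cache line with
__cmpwait_relaxed() (wfe); it is an assumption about what such a
specialization could look like, not the actual arm implementation from
this series:

/*
 * Hypothetical arm64-flavoured override (sketch): sleep with wfe via
 * __cmpwait_relaxed() between checks instead of busy-spinning. The
 * timeout is only re-checked when a wakeup event arrives, so a real
 * version must guarantee periodic wakeups (e.g. the arm64 WFE event
 * stream) or fall back to the generic spin loop.
 */
#define smp_vcond_load_relaxed(nsecs, addr, mask, val) ({		\
	const u64 __start = local_clock_noinstr();			\
	u64 __nsecs = (nsecs);						\
	typeof(addr) __addr = (addr);					\
	typeof(*__addr) __mask = (mask);				\
	typeof(*__addr) __val = (val);					\
	typeof(*__addr) __cur;						\
	for (;;) {							\
		__cur = READ_ONCE(*__addr);				\
		if ((__cur & __mask) == __val ||			\
		    local_clock_noinstr() - __start > __nsecs)		\
			break;						\
		__cmpwait_relaxed(__addr, __cur);			\
	}								\
	__cur;								\
})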