diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -296,8 +296,6 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
return old - 1;
}
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -603,7 +603,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
#define atomic64_dec(v) atomic64_sub(1LL, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
#endif /* !CONFIG_GENERIC_ATOMIC64 */
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -534,7 +534,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
#define atomic64_dec(v) atomic64_sub(1LL, (v))
#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1LL, (v))
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
#endif /* !CONFIG_GENERIC_ATOMIC64 */
#endif
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -204,7 +204,5 @@
#define atomic64_add_unless(v, a, u) (___atomic_add_unless(v, a, u, 64) != u)
#define atomic64_andnot atomic64_andnot
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
#endif
#endif
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -246,8 +246,6 @@ static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
long c, old, dec;
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -644,8 +644,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -305,8 +305,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
/*
* atomic64_dec_if_positive - decrement by 1 if old value positive
* @v: pointer of type atomic_t
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -582,6 +582,7 @@ static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
return t1 != 0;
}
+#define atomic64_inc_not_zero(v) atomic64_inc_not_zero((v))
#endif /* __powerpc64__ */
diff --git a/arch/riscv/include/asm/atomic.h b/arch/riscv/include/asm/atomic.h
--- a/arch/riscv/include/asm/atomic.h
+++ b/arch/riscv/include/asm/atomic.h
@@ -375,13 +375,6 @@ static __always_inline int atomic64_add_unless(atomic64_t *v, long a, long u)
}
#endif
-#ifndef CONFIG_GENERIC_ATOMIC64
-static __always_inline long atomic64_inc_not_zero(atomic64_t *v)
-{
- return atomic64_add_unless(v, 1, 0);
-}
-#endif
-
/*
* atomic_{cmp,}xchg is required to have exactly the same ordering semantics as
* {cmp,}xchg and the operations that return, so they need a full barrier.
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -212,6 +212,5 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
#define atomic64_dec(_v) atomic64_sub(1, _v)
#define atomic64_dec_return(_v) atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v) (atomic64_sub_return(1, _v) == 0)
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#endif /* __ARCH_S390_ATOMIC__ */
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -123,8 +123,6 @@ static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
long atomic64_dec_if_positive(atomic64_t *v);
#endif /* !(__ARCH_SPARC64_ATOMIC__) */
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -207,8 +207,6 @@ static inline bool arch_atomic64_add_unless(atomic64_t *v, long a, long u)
return true;
}
-#define arch_atomic64_inc_not_zero(v) arch_atomic64_add_unless((v), 1, 0)
-
/*
* arch_atomic64_dec_if_positive - decrement by 1 if old value positive
* @v: pointer of type atomic_t
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -205,11 +205,14 @@ static __always_inline s64 atomic64_dec_return(atomic64_t *v)
return arch_atomic64_dec_return(v);
}
+#ifdef arch_atomic64_inc_not_zero
+#define atomic64_inc_not_zero atomic64_inc_not_zero
static __always_inline s64 atomic64_inc_not_zero(atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_inc_not_zero(v);
}
+#endif
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
diff --git a/include/asm-generic/atomic64.h b/include/asm-generic/atomic64.h
--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -62,6 +62,5 @@ extern int atomic64_add_unless(atomic64_t *v, long long a, long long u);
#define atomic64_dec(v) atomic64_sub(1LL, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
#endif /* _ASM_GENERIC_ATOMIC64_H */
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -1048,6 +1048,17 @@ static inline int atomic_dec_if_positive(atomic_t *v)
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
+/**
+ * atomic64_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1, so long as @v is non-zero.
+ * Returns non-zero if @v was non-zero, and zero otherwise.
+ */
+#ifndef atomic64_inc_not_zero
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+#endif
+
#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
We define a trivial fallback for atomic_inc_not_zero(), but don't do the
same for atomic64_inc_not_zero(), leading most architectures to define
the same boilerplate.

Let's add a fallback in <linux/atomic.h>, and remove the redundant
implementations. Note that atomic64_add_unless() is always defined in
<linux/atomic.h>, and promotes its arguments to the requisite types, so
we need not do this explicitly.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Will Deacon <will.deacon@arm.com>
---
 arch/alpha/include/asm/atomic.h           |  2 --
 arch/arc/include/asm/atomic.h             |  1 -
 arch/arm/include/asm/atomic.h             |  1 -
 arch/arm64/include/asm/atomic.h           |  2 --
 arch/ia64/include/asm/atomic.h            |  2 --
 arch/mips/include/asm/atomic.h            |  2 --
 arch/parisc/include/asm/atomic.h          |  2 --
 arch/powerpc/include/asm/atomic.h         |  1 +
 arch/riscv/include/asm/atomic.h           |  7 -------
 arch/s390/include/asm/atomic.h            |  1 -
 arch/sparc/include/asm/atomic_64.h        |  2 --
 arch/x86/include/asm/atomic64_64.h        |  2 --
 include/asm-generic/atomic-instrumented.h |  3 +++
 include/asm-generic/atomic64.h            |  1 -
 include/linux/atomic.h                    | 11 +++++++++++
 15 files changed, 15 insertions(+), 25 deletions(-)

-- 
2.11.0
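For reference, the preprocessor idiom this patch relies on can be shown
outside the kernel. The sketch below is plain C99 with hypothetical names
(my_inc_not_zero, my_add_unless, the_counter); it is illustrative, not
kernel code, and the stand-in operations are not actually atomic. It models
both halves of the scheme: an "arch" implementation advertised via a
self-referential macro (as the powerpc hunk does), and the generic
#ifndef-guarded fallback (as the <linux/atomic.h> hunk adds).

	#include <stdio.h>

	static long the_counter;	/* stand-in for atomic64_t */

	/* "arch" header: provides its own implementation. */
	static inline int my_inc_not_zero(long *v)
	{
		/* models inc_not_zero semantics, without atomicity */
		if (*v == 0)
			return 0;	/* was zero: no increment */
		(*v)++;
		return 1;		/* was non-zero: incremented */
	}
	/*
	 * Self-referential macro: function-like macros never expand
	 * recursively, so calls still reach the inline function above.
	 * The macro exists only so #ifndef below can detect it.
	 */
	#define my_inc_not_zero(v) my_inc_not_zero((v))

	/* "generic" header: trivial fallback, only if none advertised. */
	static inline int my_add_unless(long *v, long a, long u)
	{
		if (*v == u)
			return 0;
		*v += a;
		return 1;
	}

	#ifndef my_inc_not_zero
	#define my_inc_not_zero(v) my_add_unless((v), 1, 0)
	#endif

	int main(void)
	{
		the_counter = 0;
		printf("%d\n", my_inc_not_zero(&the_counter));	/* 0 */
		the_counter = 5;
		printf("%d\n", my_inc_not_zero(&the_counter));	/* 1 */
		printf("%ld\n", the_counter);			/* 6 */
		return 0;
	}

Here the #ifndef sees the advertised macro and skips the fallback, so the
"arch" version is used; deleting the self-referential #define would select
the fallback instead, with no change to callers. Likewise, the real
fallback can pass plain 1 and 0 because atomic64_add_unless() in
<linux/atomic.h> promotes its arguments to the requisite 64-bit types, per
the commit message above.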