@@ -336,8 +336,6 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
c; \
})
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-
#define atomic_inc(v) atomic_add(1, v)
#define atomic_dec(v) atomic_sub(1, v)
@@ -197,8 +197,6 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
return __oldval;
}
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-
#define atomic_inc(v) atomic_add(1, (v))
#define atomic_dec(v) atomic_sub(1, (v))
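
[Not part of the patch, for context only.] A minimal sketch of the kind of generic fallback these per-architecture definitions collapse into, assuming the core atomic header provides atomic_inc_not_zero() only when an architecture has not defined its own. The #ifndef guard and placement are assumptions; the body simply mirrors the lines being removed above, which build the operation from atomic_add_unless((v), 1, 0).

/*
 * Sketch, not part of this patch: an assumed generic fallback in the
 * core atomic header. The guard lets an architecture override it; the
 * body is the same construction the removed per-arch copies used.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
#endif

With a single guarded definition like this in generic code, an architecture only keeps a local version when it can implement the operation better, which is what allows hunks like the ones above and below to delete their local copies outright.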
@@ -375,15 +375,6 @@ static __always_inline int atomic64_add_unless(atomic64_t *v, long a, long u)
}
#endif
-/*
- * The extra atomic operations that are constructed from one of the core
- * LR/SC-based operations above.
- */
-static __always_inline int atomic_inc_not_zero(atomic_t *v)
-{
- return atomic_fetch_add_unless(v, 1, 0);
-}
-
#ifndef CONFIG_GENERIC_ATOMIC64
static __always_inline long atomic64_inc_not_zero(atomic64_t *v)
{