@@ -32,7 +32,9 @@ static inline void atomic_##op(int i, at
register int w0 asm ("w0") = i; \
register atomic_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN(__LL_SC_ATOMIC(op), \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN(__LL_SC_ATOMIC(op), \
" " #asm_op " %w[i], %[v]\n") \
: [i] "+r" (w0), [v] "+Q" (v->counter) \
: "r" (x1) \
@@ -52,7 +54,9 @@ static inline int atomic_fetch_##op##nam
register int w0 asm ("w0") = i; \
register atomic_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC(fetch_##op##name), \
/* LSE atomics */ \
@@ -84,7 +88,9 @@ static inline int atomic_add_return##nam
register int w0 asm ("w0") = i; \
register atomic_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC(add_return##name) \
__nops(1), \
@@ -110,7 +116,9 @@ static inline void atomic_and(int i, ato
register int w0 asm ("w0") = i;
register atomic_t *x1 asm ("x1") = v;

- asm volatile(ARM64_LSE_ATOMIC_INSN(
+ asm volatile(
+ __LSE_PREAMBLE
+ ARM64_LSE_ATOMIC_INSN(
/* LL/SC */
__LL_SC_ATOMIC(and)
__nops(1),
@@ -128,7 +136,9 @@ static inline int atomic_fetch_and##name
register int w0 asm ("w0") = i; \
register atomic_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC(fetch_and##name) \
__nops(1), \
@@ -154,7 +164,9 @@ static inline void atomic_sub(int i, ato
register int w0 asm ("w0") = i;
register atomic_t *x1 asm ("x1") = v;

- asm volatile(ARM64_LSE_ATOMIC_INSN(
+ asm volatile(
+ __LSE_PREAMBLE
+ ARM64_LSE_ATOMIC_INSN(
/* LL/SC */
__LL_SC_ATOMIC(sub)
__nops(1),
@@ -172,7 +184,9 @@ static inline int atomic_sub_return##nam
register int w0 asm ("w0") = i; \
register atomic_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC(sub_return##name) \
__nops(2), \
@@ -200,7 +214,9 @@ static inline int atomic_fetch_sub##name
register int w0 asm ("w0") = i; \
register atomic_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC(fetch_sub##name) \
__nops(1), \
@@ -229,7 +245,9 @@ static inline void atomic64_##op(long i,
register long x0 asm ("x0") = i; \
register atomic64_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN(__LL_SC_ATOMIC64(op), \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN(__LL_SC_ATOMIC64(op), \
" " #asm_op " %[i], %[v]\n") \
: [i] "+r" (x0), [v] "+Q" (v->counter) \
: "r" (x1) \
@@ -249,7 +267,9 @@ static inline long atomic64_fetch_##op##
register long x0 asm ("x0") = i; \
register atomic64_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC64(fetch_##op##name), \
/* LSE atomics */ \
@@ -281,7 +301,9 @@ static inline long atomic64_add_return##
register long x0 asm ("x0") = i; \
register atomic64_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC64(add_return##name) \
__nops(1), \
@@ -307,7 +329,9 @@ static inline void atomic64_and(long i,
register long x0 asm ("x0") = i;
register atomic64_t *x1 asm ("x1") = v;

- asm volatile(ARM64_LSE_ATOMIC_INSN(
+ asm volatile(
+ __LSE_PREAMBLE
+ ARM64_LSE_ATOMIC_INSN(
/* LL/SC */
__LL_SC_ATOMIC64(and)
__nops(1),
@@ -325,7 +349,9 @@ static inline long atomic64_fetch_and##n
register long x0 asm ("x0") = i; \
register atomic64_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC64(fetch_and##name) \
__nops(1), \
@@ -351,7 +377,9 @@ static inline void atomic64_sub(long i,
register long x0 asm ("x0") = i;
register atomic64_t *x1 asm ("x1") = v;

- asm volatile(ARM64_LSE_ATOMIC_INSN(
+ asm volatile(
+ __LSE_PREAMBLE
+ ARM64_LSE_ATOMIC_INSN(
/* LL/SC */
__LL_SC_ATOMIC64(sub)
__nops(1),
@@ -369,7 +397,9 @@ static inline long atomic64_sub_return##
register long x0 asm ("x0") = i; \
register atomic64_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC64(sub_return##name) \
__nops(2), \
@@ -397,7 +427,9 @@ static inline long atomic64_fetch_sub##n
register long x0 asm ("x0") = i; \
register atomic64_t *x1 asm ("x1") = v; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_ATOMIC64(fetch_sub##name) \
__nops(1), \
@@ -422,7 +454,9 @@ static inline long atomic64_dec_if_posit
{
register long x0 asm ("x0") = (long)v;

- asm volatile(ARM64_LSE_ATOMIC_INSN(
+ asm volatile(
+ __LSE_PREAMBLE
+ ARM64_LSE_ATOMIC_INSN(
/* LL/SC */
__LL_SC_ATOMIC64(dec_if_positive)
__nops(6),
@@ -455,7 +489,9 @@ static inline unsigned long __cmpxchg_ca
register unsigned long x1 asm ("x1") = old; \
register unsigned long x2 asm ("x2") = new; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_CMPXCHG(name) \
__nops(2), \
@@ -507,7 +543,9 @@ static inline long __cmpxchg_double##nam
register unsigned long x3 asm ("x3") = new2; \
register unsigned long x4 asm ("x4") = (unsigned long)ptr; \
\
- asm volatile(ARM64_LSE_ATOMIC_INSN( \
+ asm volatile( \
+ __LSE_PREAMBLE \
+ ARM64_LSE_ATOMIC_INSN( \
/* LL/SC */ \
__LL_SC_CMPXCHG_DBL(name) \
__nops(3), \
@@ -4,6 +4,8 @@

#if defined(CONFIG_AS_LSE) && defined(CONFIG_ARM64_LSE_ATOMICS)

+#define __LSE_PREAMBLE ".arch armv8-a+lse\n"
+
#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/stringify.h>
@@ -20,8 +22,6 @@

#else /* __ASSEMBLER__ */

-__asm__(".arch_extension lse");
-
/* Move the ll/sc atomics out-of-line */
#define __LL_SC_INLINE notrace
#define __LL_SC_PREFIX(x) __ll_sc_##x
@@ -33,7 +33,7 @@ __asm__(".arch_extension lse");

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse) \
- ALTERNATIVE(llsc, lse, ARM64_HAS_LSE_ATOMICS)
+ ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)

#endif /* __ASSEMBLER__ */
#else /* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */
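
For context only (not part of the patch): a minimal, self-contained sketch of the pattern the patch applies, assuming an AArch64 target. The names MY_LSE_PREAMBLE and my_atomic_add are hypothetical stand-ins. The idea is that the ".arch armv8-a+lse" directive is emitted at the start of each inline asm block, so an assembler that handles every block independently (such as LLVM's integrated assembler) accepts the LSE instruction that follows even when the baseline -march does not enable LSE.

#define MY_LSE_PREAMBLE	".arch armv8-a+lse\n"	/* hypothetical stand-in for __LSE_PREAMBLE */

static inline void my_atomic_add(int i, int *counter)
{
	/*
	 * The directive at the top of the block lets the LSE STADD
	 * instruction below assemble even when the compiler's baseline
	 * architecture does not include the LSE extension.
	 */
	asm volatile(
	MY_LSE_PREAMBLE
	"	stadd	%w[i], %[v]\n"
	: [v] "+Q" (*counter)
	: [i] "r" (i));
}

The change in lse.h follows the same idea: the file-wide __asm__(".arch_extension lse") statement is removed, and the directive is instead carried by each asm block (and by the LSE side of ARM64_LSE_ATOMIC_INSN()) through __LSE_PREAMBLE.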