
[1/3] cputlb: Disable __always_inline__ without optimization

Message ID 20190911014353.5926-2-richard.henderson@linaro.org
State New
Series cputlb: Adjust tlb bswap implementation

Commit Message

Richard Henderson Sept. 11, 2019, 1:43 a.m. UTC
This forced inlining can result in missing symbols,
which makes a debugging build harder to follow.

Reported-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>

---
 accel/tcg/cputlb.c | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

-- 
2.17.1
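
For context, a minimal sketch of the trade-off the commit message describes, using hypothetical names (load_any, load_u32, and a cut-down stand-in for QEMU's MemOp) rather than the real cputlb.c helpers: when every call site passes a compile-time-constant op, forced inlining lets the dispatch fold to a single load even at -O0, but the inlined helper then emits no symbol of its own.

/* Hypothetical stand-in for the cputlb.c pattern: op is constant at
 * every call site, so inlining folds the switch away. */
#include <stdint.h>

typedef enum { MO_8, MO_16, MO_32, MO_64 } MemOp;

static inline uint64_t __attribute__((always_inline))
load_any(const void *p, MemOp op)
{
    switch (op) {
    case MO_8:  return *(const uint8_t *)p;
    case MO_16: return *(const uint16_t *)p;
    case MO_32: return *(const uint32_t *)p;
    default:    return *(const uint64_t *)p;
    }
}

uint64_t load_u32(const void *p)
{
    return load_any(p, MO_32);   /* constant op: the switch folds away */
}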

Comments

Peter Maydell Sept. 11, 2019, 8:34 a.m. UTC | #1
On Wed, 11 Sep 2019 at 02:43, Richard Henderson
<richard.henderson@linaro.org> wrote:
>
> This forced inlining can result in missing symbols,
> which makes a debugging build harder to follow.
>
> Reported-by: Peter Maydell <peter.maydell@linaro.org>
> Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
> ---
>  accel/tcg/cputlb.c | 16 ++++++++++++++--
>  1 file changed, 14 insertions(+), 2 deletions(-)
>
> diff --git a/accel/tcg/cputlb.c b/accel/tcg/cputlb.c
> index abae79650c..909f01ebcc 100644
> --- a/accel/tcg/cputlb.c
> +++ b/accel/tcg/cputlb.c
> @@ -1269,6 +1269,18 @@ static void *atomic_mmu_lookup(CPUArchState *env, target_ulong addr,
>      cpu_loop_exit_atomic(env_cpu(env), retaddr);
>  }
>
> +/*
> + * In order for the expected constant folding to happen,
> + * we require that some functions be inlined.
> + * However, this inlining can make debugging harder for a
> + * non-optimizing build.
> + */
> +#ifdef __OPTIMIZE__
> +#define ALWAYS_INLINE  __attribute__((always_inline))
> +#else
> +#define ALWAYS_INLINE
> +#endif
> +

Maybe this should go in compiler.h ?

>  /*
>   * Load Helpers
>   *
> @@ -1281,7 +1293,7 @@ static void *atomic_mmu_lookup(CPUArchState *env, target_ulong addr,
>  typedef uint64_t FullLoadHelper(CPUArchState *env, target_ulong addr,
>                                  TCGMemOpIdx oi, uintptr_t retaddr);
>
> -static inline uint64_t __attribute__((always_inline))
> +static inline uint64_t ALWAYS_INLINE
>  load_helper(CPUArchState *env, target_ulong addr, TCGMemOpIdx oi,
>              uintptr_t retaddr, MemOp op, bool code_read,
>              FullLoadHelper *full_load)
> @@ -1530,7 +1542,7 @@ tcg_target_ulong helper_be_ldsl_mmu(CPUArchState *env, target_ulong addr,
>   * Store Helpers
>   */
>
> -static inline void __attribute__((always_inline))
> +static inline void ALWAYS_INLINE
>  store_helper(CPUArchState *env, target_ulong addr, uint64_t val,
>               TCGMemOpIdx oi, uintptr_t retaddr, MemOp op)
>  {
> --
> 2.17.1

Either way

Reviewed-by: Peter Maydell <peter.maydell@linaro.org>

thanks
-- PMM
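
A sketch of the compiler.h variant Peter suggests; the macro name QEMU_ALWAYS_INLINE is an assumption here, following the QEMU_ prefix convention of that header, not something taken from this thread:

/* include/qemu/compiler.h (hypothetical placement): the guard is the
 * same as in the patch, only the macro name gains the QEMU_ prefix
 * used by the other attribute wrappers in that header. */
#if defined(__OPTIMIZE__)
#define QEMU_ALWAYS_INLINE  __attribute__((always_inline))
#else
#define QEMU_ALWAYS_INLINE
#endif

load_helper and store_helper would then be declared with QEMU_ALWAYS_INLINE directly, with no local ALWAYS_INLINE macro left in cputlb.c.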

Patch

diff --git a/accel/tcg/cputlb.c b/accel/tcg/cputlb.c
index abae79650c..909f01ebcc 100644
--- a/accel/tcg/cputlb.c
+++ b/accel/tcg/cputlb.c
@@ -1269,6 +1269,18 @@ static void *atomic_mmu_lookup(CPUArchState *env, target_ulong addr,
     cpu_loop_exit_atomic(env_cpu(env), retaddr);
 }
 
+/*
+ * In order for the expected constant folding to happen,
+ * we require that some functions be inlined.
+ * However, this inlining can make debugging harder for a
+ * non-optimizing build.
+ */
+#ifdef __OPTIMIZE__
+#define ALWAYS_INLINE  __attribute__((always_inline))
+#else
+#define ALWAYS_INLINE
+#endif
+
 /*
  * Load Helpers
  *
@@ -1281,7 +1293,7 @@ static void *atomic_mmu_lookup(CPUArchState *env, target_ulong addr,
 typedef uint64_t FullLoadHelper(CPUArchState *env, target_ulong addr,
                                 TCGMemOpIdx oi, uintptr_t retaddr);
 
-static inline uint64_t __attribute__((always_inline))
+static inline uint64_t ALWAYS_INLINE
 load_helper(CPUArchState *env, target_ulong addr, TCGMemOpIdx oi,
             uintptr_t retaddr, MemOp op, bool code_read,
             FullLoadHelper *full_load)
@@ -1530,7 +1542,7 @@ tcg_target_ulong helper_be_ldsl_mmu(CPUArchState *env, target_ulong addr,
  * Store Helpers
  */
 
-static inline void __attribute__((always_inline))
+static inline void ALWAYS_INLINE
 store_helper(CPUArchState *env, target_ulong addr, uint64_t val,
              TCGMemOpIdx oi, uintptr_t retaddr, MemOp op)
 {
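
The missing-symbol effect the commit message describes can be reproduced with a standalone file (hypothetical names, not QEMU code): compiled without optimization, the forced-inline variant leaves no symbol to set a breakpoint on.

/* sketch.c: gcc -O0 -c sketch.c && nm sketch.o
 * With the attribute present, load16 is folded into caller even at
 * -O0 and nm shows no load16 symbol; remove the attribute and the
 * out-of-line copy (and its local symbol) reappears. */
#include <stdint.h>

static inline uint16_t __attribute__((always_inline))
load16(const void *p)
{
    return *(const uint16_t *)p;
}

uint16_t caller(const void *p)
{
    return load16(p);
}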