@@ -3441,6 +3441,8 @@ FIELD(TBFLAG_ANY, FPEXC_EL, 8, 2)
FIELD(TBFLAG_ANY, DEBUG_TARGET_EL, 10, 2)
/* Memory operations require alignment: SCTLR_ELx.A or CCR.UNALIGN_TRP */
FIELD(TBFLAG_ANY, ALIGN_MEM, 12, 1)
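+/* Set if the CPU is in an illegal execution state: PSTATE.IL or CPSR.IL is 1 */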
+FIELD(TBFLAG_ANY, PSTATE__IL, 13, 1)
/*
* Bit usage when in AArch32 state, both A- and M-profile.
@@ -270,4 +270,10 @@ static inline uint32_t syn_wfx(int cv, int cond, int ti, bool is_16bit)
(cv << 24) | (cond << 20) | ti;
}
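+/* Syndrome for an Illegal Execution state exception: only the EC and IL fields are set */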
+static inline uint32_t syn_illegalstate(void)
+{
+ return (EC_ILLEGALSTATE << ARM_EL_EC_SHIFT) | ARM_EL_IL;
+}
+
#endif /* TARGET_ARM_SYNDROME_H */
@@ -98,6 +98,8 @@ typedef struct DisasContext {
bool hstr_active;
/* True if memory operations require alignment */
bool align_mem;
+ /* True if PSTATE.IL is set */
+ bool pstate_il;
/*
* >= 0, a copy of PSTATE.BTYPE, which will be 0 without v8.5-BTI.
* < 0, set by the current instruction.
@@ -1071,6 +1071,8 @@ illegal_return:
if (!arm_singlestep_active(env)) {
env->pstate &= ~PSTATE_SS;
}
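+    /* PSTATE.IL is now cached in the hflags, so they must be rebuilt here */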
+ helper_rebuild_hflags_a64(env, cur_el);
qemu_log_mask(LOG_GUEST_ERROR, "Illegal exception return at EL%d: "
"resuming execution at 0x%" PRIx64 "\n", cur_el, env->pc);
}
@@ -13408,6 +13408,10 @@ static CPUARMTBFlags rebuild_hflags_a32(CPUARMState *env, int fp_el,
DP_TBFLAG_A32(flags, HSTR_ACTIVE, 1);
}
+ if (env->uncached_cpsr & CPSR_IL) {
+ DP_TBFLAG_ANY(flags, PSTATE__IL, 1);
+ }
+
return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags);
}
@@ -13502,6 +13506,10 @@ static CPUARMTBFlags rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
}
}
+ if (env->pstate & PSTATE_IL) {
+ DP_TBFLAG_ANY(flags, PSTATE__IL, 1);
+ }
+
if (cpu_isar_feature(aa64_mte, env_archcpu(env))) {
/*
* Set MTE_ACTIVE if any access may be Checked, and leave clear
@@ -14662,6 +14662,16 @@ static void disas_a64_insn(CPUARMState *env, DisasContext *s)
s->fp_access_checked = false;
s->sve_access_checked = false;
+ if (s->pstate_il) {
+ /*
+ * Illegal execution state. This has priority over BTI
+ * exceptions, but comes after instruction abort exceptions.
+ */
+ gen_exception_insn(s, s->pc_curr, EXCP_UDEF,
+ syn_illegalstate(), default_exception_el(s));
+ return;
+ }
+
if (dc_isar_feature(aa64_bti, s)) {
if (s->base.num_insns == 1) {
/*
@@ -14780,6 +14790,7 @@ static void aarch64_tr_init_disas_context(DisasContextBase *dcbase,
#endif
dc->fp_excp_el = EX_TBFLAG_ANY(tb_flags, FPEXC_EL);
dc->align_mem = EX_TBFLAG_ANY(tb_flags, ALIGN_MEM);
+ dc->pstate_il = EX_TBFLAG_ANY(tb_flags, PSTATE__IL);
dc->sve_excp_el = EX_TBFLAG_A64(tb_flags, SVEEXC_EL);
dc->sve_len = (EX_TBFLAG_A64(tb_flags, ZCR_LEN) + 1) * 16;
dc->pauth_active = EX_TBFLAG_A64(tb_flags, PAUTH_ACTIVE);
@@ -9045,6 +9045,16 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
return;
}
+ if (s->pstate_il) {
+ /*
+ * Illegal execution state. This has priority over BTI
+ * exceptions, but comes after instruction abort exceptions.
+ */
+ gen_exception_insn(s, s->pc_curr, EXCP_UDEF,
+ syn_illegalstate(), default_exception_el(s));
+ return;
+ }
+
if (cond == 0xf) {
/* In ARMv3 and v4 the NV condition is UNPREDICTABLE; we
* choose to UNDEF. In ARMv5 and above the space is used
@@ -9313,6 +9323,7 @@ static void arm_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
#endif
dc->fp_excp_el = EX_TBFLAG_ANY(tb_flags, FPEXC_EL);
dc->align_mem = EX_TBFLAG_ANY(tb_flags, ALIGN_MEM);
+ dc->pstate_il = EX_TBFLAG_ANY(tb_flags, PSTATE__IL);
if (arm_feature(env, ARM_FEATURE_M)) {
dc->vfp_enabled = 1;
@@ -9576,6 +9587,16 @@ static void thumb_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
}
dc->insn = insn;
+ if (dc->pstate_il) {
+ /*
+ * Illegal execution state. This has priority over BTI
+ * exceptions, but comes after instruction abort exceptions.
+ */
+ gen_exception_insn(dc, dc->pc_curr, EXCP_UDEF,
+ syn_illegalstate(), default_exception_el(dc));
+ return;
+ }
+
if (dc->eci) {
/*
* For M-profile continuable instructions, ECI/ICI handling