@@ -76,8 +76,13 @@ int arm_cpu_gdb_write_register(CPUState *cs, uint8_t *mem_buf, int n)
tmp = ldl_p(mem_buf);
- /* Mask out low bit of PC to workaround gdb bugs. This will probably
- cause problems if we ever implement the Jazelle DBX extensions. */
+ /*
+ * Mask out low bit of PC to work around gdb bugs.
+ * This avoids an assert in thumb_tr_translate_insn, because it is
+ * architecturally impossible to misalign the PC.
+ * This will probably cause problems if we ever implement the
+ * Jazelle DBX extensions.
+ */
if (n == 15) {
tmp &= ~1;
}
@@ -781,6 +781,15 @@ static int cpu_post_load(void *opaque, int version_id)
hw_breakpoint_update_all(cpu);
hw_watchpoint_update_all(cpu);
+ /*
+ * A misaligned Thumb PC is architecturally impossible.
+ * We have an assert in thumb_tr_translate_insn to verify this.
+ * Fail an incoming migration to avoid triggering this assert.
+ */
+ if (!is_a64(env) && env->thumb && (env->regs[15] & 1)) {
+ return -1;
+ }
+
if (!kvm_enabled()) {
pmu_op_finish(&cpu->env);
}
@@ -9640,6 +9640,9 @@ static void thumb_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
uint32_t insn;
bool is_16bit;
+ /* A misaligned Thumb PC is architecturally impossible. */
+ assert((dc->base.pc_next & 1) == 0);
+
if (arm_check_ss_active(dc) || arm_check_kernelpage(dc)) {
dc->base.pc_next += 2;
return;
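
Not part of the patch: a minimal, self-contained C sketch of the architectural invariant the new assert relies on. On Arm, an interworking branch (BX/BLX-style PC write) consumes bit 0 of the target address to select Thumb state and clears it before the value reaches the PC, so the PC can never hold an odd value while executing Thumb code. The names below (toy_cpu, toy_bx_write_pc) are hypothetical, for illustration only, and are not QEMU APIs; Arm-state alignment of bits [1:0] is deliberately glossed over.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* Hypothetical toy state: only the two fields the invariant involves. */
struct toy_cpu {
    uint32_t pc;   /* program counter */
    bool thumb;    /* CPSR.T: executing Thumb instructions */
};

/*
 * Simplified model of an interworking PC write: bit 0 of the target
 * selects Thumb state and is cleared before it reaches the PC.
 */
static void toy_bx_write_pc(struct toy_cpu *cpu, uint32_t target)
{
    cpu->thumb = target & 1;
    cpu->pc = target & ~1u;
}

int main(void)
{
    struct toy_cpu cpu = { 0 };

    toy_bx_write_pc(&cpu, 0x8001);  /* odd target: enter Thumb state */
    assert(cpu.thumb && (cpu.pc & 1) == 0);

    toy_bx_write_pc(&cpu, 0x9000);  /* aligned target: enter Arm state */
    assert(!cpu.thumb && (cpu.pc & 1) == 0);

    /*
     * In both cases the PC itself is even, which is the property
     * thumb_tr_translate_insn now asserts about dc->base.pc_next;
     * the gdbstub mask and the migration check keep external writers
     * (gdb, an incoming migration stream) from violating it.
     */
    return 0;
}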