-rw-r--r--   target/arm/gdbstub.c     9
-rw-r--r--   target/arm/machine.c    10
-rw-r--r--   target/arm/translate.c   3
3 files changed, 20 insertions, 2 deletions
diff --git a/target/arm/gdbstub.c b/target/arm/gdbstub.c
index 134da0d0ae..ca1de47511 100644
--- a/target/arm/gdbstub.c
+++ b/target/arm/gdbstub.c
@@ -77,8 +77,13 @@ int arm_cpu_gdb_write_register(CPUState *cs, uint8_t *mem_buf, int n)
     tmp = ldl_p(mem_buf);
-    /* Mask out low bit of PC to workaround gdb bugs. This will probably
-       cause problems if we ever implement the Jazelle DBX extensions. */
+    /*
+     * Mask out low bits of PC to workaround gdb bugs.
+     * This avoids an assert in thumb_tr_translate_insn, because it is
+     * architecturally impossible to misalign the pc.
+     * This will probably cause problems if we ever implement the
+     * Jazelle DBX extensions.
+     */
     if (n == 15) {
         tmp &= ~1;
     }
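
A minimal standalone sketch (not QEMU code; the address value and names below are invented for illustration) of why the masking above keeps the gdbstub path consistent with the new assert: gdb may hand back a Thumb code address with bit 0 set, but the architectural PC is always halfword aligned, so clearing that bit for register 15 can never produce a value that trips the translator assert.

    #include <assert.h>
    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t from_gdb = 0x00008001u; /* hypothetical Thumb address, bit 0 set */
        uint32_t pc = from_gdb & ~1u;    /* same masking the hunk above applies to r15 */

        assert((pc & 1) == 0);           /* the invariant asserted in thumb_tr_translate_insn */
        printf("gdb value 0x%08" PRIx32 " -> pc 0x%08" PRIx32 "\n", from_gdb, pc);
        return 0;
    }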
diff --git a/target/arm/machine.c b/target/arm/machine.c
index c74d8c3f4b..135d2420b5 100644
--- a/target/arm/machine.c
+++ b/target/arm/machine.c
@@ -794,6 +794,16 @@ static int cpu_post_load(void *opaque, int version_id)
             return -1;
         }
     }
+
+    /*
+     * Misaligned thumb pc is architecturally impossible.
+     * We have an assert in thumb_tr_translate_insn to verify this.
+     * Fail an incoming migrate to avoid this assert.
+     */
+    if (!is_a64(env) && env->thumb && (env->regs[15] & 1)) {
+        return -1;
+    }
+
     if (!kvm_enabled()) {
         pmu_op_finish(&cpu->env);
     }
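
The check above applies the same reasoning to migration: rather than letting a corrupt or hostile incoming state reach the translator assert, cpu_post_load() rejects it. A minimal standalone sketch of that predicate, with an invented struct standing in for the CPUARMState fields it reads:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for the bits of CPUARMState the check reads (names invented). */
    struct fake_arm_state {
        bool aarch64;        /* plays the role of is_a64(env) */
        bool thumb;          /* plays the role of env->thumb */
        uint32_t regs[16];   /* plays the role of env->regs[] */
    };

    /* Mirror of the new cpu_post_load() check: -1 rejects the incoming state. */
    int check_incoming_pc(const struct fake_arm_state *s)
    {
        if (!s->aarch64 && s->thumb && (s->regs[15] & 1)) {
            return -1;
        }
        return 0;
    }

    int main(void)
    {
        struct fake_arm_state bad  = { .thumb = true, .regs = { [15] = 0x8001 } };
        struct fake_arm_state good = { .thumb = true, .regs = { [15] = 0x8000 } };

        printf("misaligned: %d, aligned: %d\n",
               check_incoming_pc(&bad), check_incoming_pc(&good));
        return 0;
    }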
diff --git a/target/arm/translate.c b/target/arm/translate.c
index 45917c3a6d..0a3840d227 100644
--- a/target/arm/translate.c
+++ b/target/arm/translate.c
@@ -9646,6 +9646,9 @@ static void thumb_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
     uint32_t insn;
     bool is_16bit;
+    /* Misaligned thumb PC is architecturally impossible. */
+    assert((dc->base.pc_next & 1) == 0);
+
     if (arm_check_ss_active(dc) || arm_check_kernelpage(dc)) {
         dc->base.pc_next = pc + 2;
         return;
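
The assert is sound because the invariant holds by construction once the PC is even: Thumb instructions are 2 or 4 bytes, so advancing by the decoded size never produces an odd pc. A toy sketch of that reasoning (illustrative only, not the QEMU decoder; the instruction-size mix is made up):

    #include <assert.h>
    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t pc = 0x1000;            /* hypothetical even start address */
        int sizes[] = { 2, 4, 2, 2, 4 }; /* made-up mix of 16- and 32-bit Thumb insns */

        for (unsigned i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++) {
            assert((pc & 1) == 0);       /* same invariant as the new assert */
            pc += sizes[i];              /* advance like pc_next in the translator */
        }
        printf("final pc 0x%08" PRIx32 ", still aligned\n", pc);
        return 0;
    }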