@@ -191,6 +191,9 @@ extern const struct VMStateDescription vmstate_arm_cpu;
void register_cp_regs_for_features(ARMCPU *cpu);
void init_cpreg_list(ARMCPU *cpu);
+bool arm_cpu_do_hvc(CPUState *cs); /* Handle an HVC exception; returns true if handled (caller aborts otherwise) */
+bool arm_cpu_do_smc(CPUState *cs); /* Handle an SMC exception; returns true if handled (caller aborts otherwise) */
+
void arm_cpu_do_interrupt(CPUState *cpu);
void arm_v7m_cpu_do_interrupt(CPUState *cpu);
@@ -51,6 +51,8 @@
#define EXCP_EXCEPTION_EXIT 8 /* Return from v7M exception. */
#define EXCP_KERNEL_TRAP 9 /* Jumped to kernel code page. */
#define EXCP_STREX 10
+#define EXCP_HVC 11 /* HyperVisor Call */
+#define EXCP_SMC 12 /* Secure Monitor Call */
#define ARMV7M_EXCP_RESET 1
#define ARMV7M_EXCP_NMI 2
@@ -483,6 +483,17 @@ void aarch64_cpu_do_interrupt(CPUState *cs)
case EXCP_FIQ:
addr += 0x100;
break;
+ case EXCP_HVC:
+ if (arm_cpu_do_hvc(cs)) {
+ return;
+ }
+ cpu_abort(cs, "Unhandled exception 0x%x\n", cs->exception_index);
+ return;
+ case EXCP_SMC:
+ if (arm_cpu_do_smc(cs)) {
+ return;
+ }
+ /* Fall through to the unhandled-exception abort below. */
default:
cpu_abort(cs, "Unhandled exception 0x%x\n", cs->exception_index);
}
@@ -3497,6 +3497,16 @@ void arm_v7m_cpu_do_interrupt(CPUState *cs)
env->thumb = addr & 1;
}
+bool arm_cpu_do_hvc(CPUState *cs)
+{
+ return false; /* No HVC handling yet: caller treats the exception as unhandled and aborts */
+}
+
+bool arm_cpu_do_smc(CPUState *cs)
+{
+ return false; /* No SMC handling yet: caller treats the exception as unhandled and aborts */
+}
+
/* Handle a CPU exception. */
void arm_cpu_do_interrupt(CPUState *cs)
{
@@ -3599,6 +3609,17 @@ void arm_cpu_do_interrupt(CPUState *cs)
mask = CPSR_A | CPSR_I | CPSR_F;
offset = 4;
break;
+ case EXCP_HVC:
+ if (arm_cpu_do_hvc(cs)) {
+ return;
+ }
+ cpu_abort(cs, "Unhandled exception 0x%x\n", cs->exception_index);
+ return;
+ case EXCP_SMC:
+ if (arm_cpu_do_smc(cs)) {
+ return;
+ }
+ /* Fall through to the unhandled-exception abort below. */
default:
cpu_abort(cs, "Unhandled exception 0x%x\n", cs->exception_index);
return; /* Never happens. Keep compiler happy. */
@@ -210,6 +210,21 @@ static inline uint32_t syn_aa32_svc(uint32_t imm16, bool is_thumb)
| (is_thumb ? 0 : ARM_EL_IL);
}
+static inline uint32_t syn_aa64_hvc(uint32_t imm16)
+{
+ return (EC_AA64_HVC << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff); /* HVC exception class + 32-bit IL + imm16 */
+}
+
+static inline uint32_t syn_aa32_hvc(uint32_t imm16)
+{
+ return (EC_AA32_HVC << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff); /* AArch32 HVC class; intended for the A32/T32 decoders */
+}
+
+static inline uint32_t syn_aa64_smc(uint32_t imm16)
+{
+ return (EC_AA64_SMC << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff); /* SMC exception class + 32-bit IL + imm16 */
+}
+
static inline uint32_t syn_aa64_bkpt(uint32_t imm16)
{
return (EC_AA64_BKPT << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff);
}
@@ -1473,20 +1473,28 @@ static void disas_exc(DisasContext *s, uint32_t insn)
switch (opc) {
case 0:
- /* SVC, HVC, SMC; since we don't support the Virtualization
- * or TrustZone extensions these all UNDEF except SVC.
- */
- if (op2_ll != 1) {
- unallocated_encoding(s);
- break;
- }
/* For SVC, HVC and SMC we advance the single-step state
* machine before taking the exception. This is architecturally
* mandated, to ensure that single-stepping a system call
* instruction works properly.
*/
- gen_ss_advance(s);
- gen_exception_insn(s, 0, EXCP_SWI, syn_aa64_svc(imm16));
+ switch (op2_ll) {
+ case 1:
+ gen_ss_advance(s);
+ gen_exception_insn(s, 0, EXCP_SWI, syn_aa64_svc(imm16));
+ break;
+ case 2:
+ gen_ss_advance(s);
+ gen_exception_insn(s, 0, EXCP_HVC, syn_aa64_hvc(imm16)); /* HVC syndrome (was wrongly syn_aa64_smc) */
+ break;
+ case 3:
+ gen_ss_advance(s);
+ gen_exception_insn(s, 0, EXCP_SMC, syn_aa64_smc(imm16)); /* SMC syndrome (was wrongly syn_aa64_hvc) */
+ break;
+ default:
+ unallocated_encoding(s);
+ break;
+ }
break;
case 1:
if (op2_ll != 0) {
@@ -7871,9 +7871,14 @@ static void disas_arm_insn(CPUARMState * env, DisasContext *s)
case 7:
{
int imm16 = extract32(insn, 0, 4) | (extract32(insn, 8, 12) << 4);
- /* SMC instruction (op1 == 3)
- and undefined instructions (op1 == 0 || op1 == 2)
- will trap */
+ /* HVC and SMC instructions */
+ if (op1 == 2) {
+ gen_exception_insn(s, 0, EXCP_HVC, syn_aa32_hvc(imm16)); /* encode an HVC syndrome, not the raw imm16 */
+ break;
+ } else if (op1 == 3) {
+ gen_exception_insn(s, 0, EXCP_SMC, 0); /* NOTE(review): should carry an SMC syndrome; no syn_aa32_smc helper exists in this patch */
+ break;
+ }
if (op1 != 1) {
goto illegal_op;
}
@@ -9709,10 +9714,15 @@ static int disas_thumb2_insn(CPUARMState *env, DisasContext *s, uint16_t insn_hw
goto illegal_op;
if (insn & (1 << 26)) {
- /* Secure monitor call (v6Z) */
- qemu_log_mask(LOG_UNIMP,
- "arm: unimplemented secure monitor call\n");
- goto illegal_op; /* not implemented. */
+ if (!(insn & (1 << 20))) {
+ /* Hypervisor call (v7) */
+ uint32_t imm16 = extract32(insn, 16, 4);
+ imm16 |= extract32(insn, 0, 12) << 4;
+ gen_exception_insn(s, 0, EXCP_HVC, syn_aa32_hvc(imm16)); /* encode an HVC syndrome, not the raw imm16 */
+ } else {
+ /* Secure monitor call (v6+) */
+ gen_exception_insn(s, 0, EXCP_SMC, 0); /* NOTE(review): should carry an SMC syndrome; no syn_aa32_smc helper exists in this patch */
+ }
} else {
op = (insn >> 20) & 7;
switch (op) {