@@ -255,6 +255,11 @@ static inline void *kvm_get_hyp_vector(void)
return kvm_ksym_ref(__kvm_hyp_vector_bp_inv);
}
+ case ARM_CPU_PART_CORTEX_A15:
+ {
+ extern char __kvm_hyp_vector_ic_inv[];
+ return kvm_ksym_ref(__kvm_hyp_vector_ic_inv);
+ }
#endif
default:
{
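For context, the selector patched by this hunk plausibly has the following overall shape. This is a sketch, not a verbatim copy of the file: it assumes the switch is keyed on read_cpuid_part(), and the exact list of parts routed to the BPIALL variant (shown here as Cortex-A12/A17) is an assumption based on which cores implement a usable BPIALL. The new Cortex-A15 case returns a vector page that invalidates the whole icache instead, since on A15 the branch predictor can only be invalidated as a side effect of an icache invalidation (ICIALLU).

static inline void *kvm_get_hyp_vector(void)
{
	switch (read_cpuid_part()) {
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	case ARM_CPU_PART_CORTEX_A12:	/* assumed: parts with a usable BPIALL */
	case ARM_CPU_PART_CORTEX_A17:
	{
		extern char __kvm_hyp_vector_bp_inv[];
		return kvm_ksym_ref(__kvm_hyp_vector_bp_inv);
	}
	case ARM_CPU_PART_CORTEX_A15:
	{
		/* No usable BPIALL: invalidate the icache on guest exit */
		extern char __kvm_hyp_vector_ic_inv[];
		return kvm_ksym_ref(__kvm_hyp_vector_ic_inv);
	}
#endif
	default:
	{
		/* Everything else keeps the plain, unhardened vectors */
		extern char __kvm_hyp_vector[];
		return kvm_ksym_ref(__kvm_hyp_vector);
	}
	}
}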
@@ -73,6 +73,28 @@ __kvm_hyp_vector:
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
.align 5
+__kvm_hyp_vector_ic_inv:
+ .global __kvm_hyp_vector_ic_inv
+
+ /*
+ * We encode the exception entry in the bottom 3 bits of
+ * SP, which requires SP to be 8-byte aligned on entry.
+ */
+ W(add) sp, sp, #1 /* Reset 7 */
+ W(add) sp, sp, #1 /* Undef 6 */
+ W(add) sp, sp, #1 /* Syscall 5 */
+ W(add) sp, sp, #1 /* Prefetch abort 4 */
+ W(add) sp, sp, #1 /* Data abort 3 */
+ W(add) sp, sp, #1 /* HVC 2 */
+ W(add) sp, sp, #1 /* IRQ 1 */
+ W(nop) /* FIQ 0 */
+
+ mcr p15, 0, r0, c7, c5, 0 /* ICIALLU */
+ isb
+
+ b decode_vectors
+
+ .align 5
__kvm_hyp_vector_bp_inv:
.global __kvm_hyp_vector_bp_inv
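The comment at the top of __kvm_hyp_vector_ic_inv is terse, so here is a small, self-contained C model of the SP trick (illustrative only, not kernel code). An exception enters the vector page at its architectural slot (Reset at offset 0x00 down to FIQ at 0x1C) and then falls through the remaining 4-byte W(add) instructions, so by the time the common code runs, the bottom 3 bits of SP hold 7 minus the slot index, which is exactly the number in each per-line comment:

#include <stdio.h>

/*
 * Illustrative model of the SP-encoding trick above; not kernel code.
 * The exception enters the vector page at slot "slot" (Reset = 0 ...
 * FIQ = 7) and falls through the remaining "W(add) sp, sp, #1"
 * instructions, so the low 3 bits of SP end up holding (7 - slot)
 * when the common handler runs.
 */
static unsigned int sp_low_bits_after_entry(unsigned int slot)
{
	unsigned int sp_low = 0;
	unsigned int i;

	for (i = slot; i < 7; i++)	/* slot 7 (FIQ) executes only the nop */
		sp_low++;
	return sp_low & 7;
}

int main(void)
{
	/* Names indexed by the encoded value, matching the comments above */
	static const char *name[8] = {
		"FIQ", "IRQ", "HVC", "Data abort",
		"Prefetch abort", "Syscall", "Undef", "Reset",
	};
	unsigned int slot;

	for (slot = 0; slot < 8; slot++) {
		unsigned int enc = sp_low_bits_after_entry(slot);

		printf("vector slot %u -> SP low bits %u (%s)\n",
		       slot, enc, name[enc]);
	}
	return 0;
}

The reason the index is smuggled into SP rather than a scratch register is that no guest general-purpose registers have been saved yet at this point, while SP is banked in HYP mode and therefore free to use. Since SP is 8-byte aligned on entry, decode_vectors (shared with the BPIALL variant, as the next hunk shows) only has to read the low 3 bits to pick the handler and clear them to restore the original stack pointer.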
@@ -92,6 +114,8 @@ __kvm_hyp_vector_bp_inv:
mcr p15, 0, r0, c7, c5, 6 /* BPIALL */
isb
+decode_vectors:
+
#ifdef CONFIG_THUMB2_KERNEL
/*
* Yet another silly hack: Use VPIDR as a temp register.