/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
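
/*
 * Vector table that mitigates CVE-2017-5715 (Spectre variant 2, branch
 * target injection) by toggling the EL3 MMU enable bit on every entry
 * from a lower EL. On the CPUs that install this table, the MMU
 * disable/enable sequence has the side effect of invalidating the
 * branch predictor, discarding any maliciously trained branch history.
 */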

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <services/arm_arch_svc.h>

	.globl	wa_cve_2017_5715_mmu_vbar
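
/*
 * Expected ESR_EL3 values for an SMC #0 trapped from AArch64 and from
 * AArch32 respectively: EC is 0x17 (SMC from AArch64) or 0x13 (SMC from
 * AArch32), IL is 1, and the ISS (the SMC immediate) is 0.
 */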
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

vector_base wa_cve_2017_5715_mmu_vbar
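
/*
 * Apply the branch predictor invalidation workaround and, for synchronous
 * exceptions, service SMCCC_ARCH_WORKAROUND_1 calls on the fast path
 * without leaving the vector entry. x0/x1 are preserved in the per-CPU
 * context that SP_EL3 points to, matching the convention of the default
 * runtime exception vectors.
 */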
	.macro	apply_cve_2017_5715_wa _is_sync_exception _esr_el3_val
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	mrs	x1, sctlr_el3
	/* Disable MMU */
	bic	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	isb
	/* Enable MMU */
	orr	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	/*
	 * Defer ISB to avoid synchronizing twice in case we hit
	 * the workaround SMC call which will implicitly synchronize
	 * because of the ERET instruction.
	 */

	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_1
	 *
	 * This sequence evaluates as:
	 *     (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	.if \_is_sync_exception
	orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
	cmp	w0, w1
	mrs	x0, esr_el3
	mov_imm	w1, \_esr_el3_val
	ccmp	w0, w1, #0, eq
	/* Static predictor will predict a fall through */
	bne	1f
	eret
1:
	.endif

	/*
	 * Synchronize now to enable the MMU. This is required
	 * to ensure the load pair below reads the data stored earlier.
	 */
	isb
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	.endm
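
/*
 * Only the lower EL vectors apply the workaround: hostile branch
 * predictor training can only come from code running at a lower EL,
 * so entries taken from EL3 itself are simply forwarded to the
 * default runtime exception handlers.
 */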

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry mmu_sync_exception_sp_el0

vector_entry mmu_irq_sp_el0
	b	irq_sp_el0
end_vector_entry mmu_irq_sp_el0

vector_entry mmu_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry mmu_fiq_sp_el0

vector_entry mmu_serror_sp_el0
	b	serror_sp_el0
end_vector_entry mmu_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry mmu_sync_exception_sp_elx

vector_entry mmu_irq_sp_elx
	b	irq_sp_elx
end_vector_entry mmu_irq_sp_elx

vector_entry mmu_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry mmu_fiq_sp_elx

vector_entry mmu_serror_sp_elx
	b	serror_sp_elx
end_vector_entry mmu_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry mmu_sync_exception_aarch64

vector_entry mmu_irq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry mmu_irq_aarch64

vector_entry mmu_fiq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry mmu_fiq_aarch64

vector_entry mmu_serror_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry mmu_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry mmu_sync_exception_aarch32

vector_entry mmu_irq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry mmu_irq_aarch32

vector_entry mmu_fiq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry mmu_fiq_aarch32

vector_entry mmu_serror_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry mmu_serror_aarch32
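
/*
 * To take effect, this table is intended to be installed into VBAR_EL3
 * (via the exported wa_cve_2017_5715_mmu_vbar symbol) by the
 * CPU-specific workaround code of the affected cores.
 */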