2 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
4 * SPDX-License-Identifier: BSD-3-Clause
8 #include <asm_macros.S>
10 #include <common/bl_common.h>
13 /* -----------------------------------------------------------------------------
14 * Very simple stackless exception handlers used by BL1.
15 * -----------------------------------------------------------------------------
19 vector_base bl1_exceptions
21 /* -----------------------------------------------------
22 * Current EL with SP0 : 0x0 - 0x200
23 * -----------------------------------------------------
/* Synchronous exception taken from current EL while on SP_EL0.
 * BL1 always runs on SP_ELX, so this vector is never expected:
 * report the exception to the platform and panic (no return). */
vector_entry SynchronousExceptionSP0
	mov	x0, #SYNC_EXCEPTION_SP_EL0	/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry SynchronousExceptionSP0
	/* IRQ taken from current EL on SP_EL0 — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry IrqSP0' opener and the 'mov x0, #IRQ_SP_EL0'
	 * that loads the exception code are not visible in this excerpt — confirm
	 * against the full file. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry IrqSP0
	/* FIQ taken from current EL on SP_EL0 — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry FiqSP0' opener and the 'mov' loading the
	 * exception code into x0 are not visible in this excerpt. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry FiqSP0
/* SError taken from current EL while on SP_EL0 — unexpected in BL1:
 * report the exception to the platform and panic (no return). */
vector_entry SErrorSP0
	mov	x0, #SERROR_SP_EL0		/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry SErrorSP0
49 /* -----------------------------------------------------
50 * Current EL with SPx: 0x200 - 0x400
51 * -----------------------------------------------------
/* Synchronous exception taken from current EL while on SP_ELX.
 * BL1 does not expect synchronous exceptions from its own EL:
 * report and panic (no return). */
vector_entry SynchronousExceptionSPx
	mov	x0, #SYNC_EXCEPTION_SP_ELX	/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry SynchronousExceptionSPx
	/* IRQ taken from current EL on SP_ELX — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry IrqSPx' opener and the 'mov' loading the
	 * exception code into x0 are not visible in this excerpt. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry IrqSPx
	/* FIQ taken from current EL on SP_ELX — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry FiqSPx' opener and the 'mov' loading the
	 * exception code into x0 are not visible in this excerpt. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry FiqSPx
/* SError taken from current EL while on SP_ELX — unexpected in BL1:
 * report the exception to the platform and panic (no return). */
vector_entry SErrorSPx
	mov	x0, #SERROR_SP_ELX		/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry SErrorSPx
77 /* -----------------------------------------------------
78 * Lower EL using AArch64 : 0x400 - 0x600
79 * -----------------------------------------------------
/* Synchronous exception from a lower EL running AArch64. The only
 * synchronous exception BL1 services from a lower EL is an SMC;
 * anything else falls through to unexpected_sync_exception. */
vector_entry SynchronousExceptionA64
	/* Enable the SError interrupt */
	msr	daifclr, #DAIF_ABT_BIT

	/* Preserve the caller's x30 (LR) in the GP-register context area on SP_EL3 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/* Expect only SMC exceptions */
	/* NOTE(review): the 'mrs' reading the syndrome register into x30 is not
	 * visible in this excerpt; the ubfx below assumes x30 holds ESR_EL3 —
	 * confirm against the full file. */
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH	/* extract exception class */
	cmp	x30, #EC_AARCH64_SMC
	b.ne	unexpected_sync_exception

	/* NOTE(review): the branch/fall-through into the SMC handler is not
	 * visible in this excerpt. */
end_vector_entry SynchronousExceptionA64
	/* IRQ from a lower EL (AArch64) — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry IrqA64' opener and the 'mov' loading the
	 * exception code into x0 are not visible in this excerpt. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry IrqA64
	/* FIQ from a lower EL (AArch64) — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry FiqA64' opener and the 'mov' loading the
	 * exception code into x0 are not visible in this excerpt. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry FiqA64
/* SError from a lower EL running AArch64 — unexpected in BL1:
 * report the exception to the platform and panic (no return). */
vector_entry SErrorA64
	mov	x0, #SERROR_AARCH64		/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry SErrorA64
114 /* -----------------------------------------------------
115 * Lower EL using AArch32 : 0x600 - 0x800
116 * -----------------------------------------------------
/* Synchronous exception from a lower EL running AArch32 — BL1 does not
 * support AArch32 lower ELs: report and panic (no return). */
vector_entry SynchronousExceptionA32
	mov	x0, #SYNC_EXCEPTION_AARCH32	/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry SynchronousExceptionA32
	/* IRQ from a lower EL (AArch32) — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry IrqA32' opener and the 'mov' loading the
	 * exception code into x0 are not visible in this excerpt. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry IrqA32
	/* FIQ from a lower EL (AArch32) — unexpected in BL1: report and panic.
	 * NOTE(review): the 'vector_entry FiqA32' opener and the 'mov' loading the
	 * exception code into x0 are not visible in this excerpt. */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry FiqA32
/* SError from a lower EL running AArch32 — unexpected in BL1:
 * report the exception to the platform and panic (no return). */
vector_entry SErrorA32
	mov	x0, #SERROR_AARCH32		/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
end_vector_entry SErrorA32
	/* ----------------------------------------------
	 * Detect if this is a RUN_IMAGE or other SMC.
	 * ----------------------------------------------
	 * NOTE(review): this excerpt is a fragment of smc_handler64 — the 'func'
	 * opener and several instructions (the function-ID compare after the mov
	 * below, the security-state check feeding the first b.ne, the switch of
	 * SP to the runtime stack, and the final eret) are not visible here.
	 * Confirm the surrounding lines against the full file. */
	mov	x30, #BL1_SMC_RUN_IMAGE		/* x30 = RUN_IMAGE function ID for comparison */

	/* ------------------------------------------------
	 * Make sure only Secure world reaches here.
	 * ------------------------------------------------ */
	b.ne	unexpected_sync_exception	/* caller not Secure world -> panic path */

	/* ----------------------------------------------
	 * Handling RUN_IMAGE SMC. First switch back to
	 * SP_EL0 for the C runtime stack.
	 * ---------------------------------------------- */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]	/* saved C runtime SP */

	/* ---------------------------------------------------------------------
	 * Pass EL3 control to next BL image.
	 * Here it expects X1 with the address of a entry_point_info_t
	 * structure describing the next BL image entrypoint.
	 * --------------------------------------------------------------------- */
	bl	bl1_print_next_bl_ep_info

	/* Load the next image's PC and SPSR from the entry_point_info_t.
	 * NOTE(review): x20 is assumed to hold the entry_point_info_t pointer;
	 * the instruction that sets it up is not visible in this excerpt. */
	ldp	x0, x1, [x20, #ENTRY_POINT_INFO_PC_OFFSET]

	/* Only an EL3 entrypoint is accepted for the next image */
	ubfx	x0, x1, #MODE_EL_SHIFT, #2	/* extract target EL from the SPSR value */
	b.ne	unexpected_sync_exception	/* next image not entering at EL3 -> panic */

	bl	disable_mmu_icache_el3
	dsb	ish /* ERET implies ISB, so it is not needed here */

	bl	print_debug_loop_message

	/* Give the platform a chance to do late cleanup before leaving BL1 */
	bl	bl1_plat_prepare_exit

	/* Populate x0-x7 with the next image's arguments from entry_point_info_t */
	ldp	x6, x7, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x30)]
	ldp	x4, x5, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x20)]
	ldp	x2, x3, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x10)]
	ldp	x0, x1, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x0)]
endfunc smc_handler64
/* Panic path for any synchronous exception BL1 does not service
 * (non-SMC exception class, wrong security state, or a RUN_IMAGE
 * request with an invalid target EL): report and panic (no return). */
unexpected_sync_exception:
	mov	x0, #SYNC_EXCEPTION_AARCH64	/* exception type code for plat_report_exception */
	bl	plat_report_exception
	no_ret	plat_panic_handler		/* does not return */
	/* -----------------------------------------------------
	 * Save Secure/Normal world context and jump to the
	 * BL1 SMC handler.
	 * -----------------------------------------------------
	 * NOTE(review): this excerpt is a fragment of smc_handler — the 'func'
	 * opener, the GP-register save sequence, the parameter/flag setup
	 * instructions, the SP switch, and the final dispatch/exit are not
	 * visible here. Confirm the surrounding lines against the full file. */

	/* -----------------------------------------------------
	 * Save the GP registers x0-x29.
	 * TODO: Revisit to store only SMCCC specified registers.
	 * ----------------------------------------------------- */

	/* -----------------------------------------------------
	 * If Secure Cycle Counter is not disabled in MDCR_EL3
	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
	 * disable all event counters and cycle counter.
	 * ----------------------------------------------------- */
	bl	save_pmcr_disable_pmu

	/* -----------------------------------------------------
	 * Populate the parameters for the SMC handler. We
	 * already have x0-x4 in place. x5 will point to a
	 * cookie (not used now). x6 will point to the context
	 * structure (SP_EL3) and x7 will contain flags we need
	 * to pass to the handler.
	 * ----------------------------------------------------- */

	/* -----------------------------------------------------
	 * Restore the saved C runtime stack value which will
	 * become the new SP_EL0 i.e. EL3 runtime stack. It was
	 * saved in the 'cpu_context' structure prior to the last
	 * exit from EL3.
	 * ----------------------------------------------------- */
	ldr	x12, [x6, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]	/* x6 = context base (SP_EL3) */

	/* ---------------------------------------------
	 * Switch back to SP_EL0 for the C runtime stack.
	 * --------------------------------------------- */

	/* -----------------------------------------------------
	 * Save the SPSR_EL3, ELR_EL3, & SCR_EL3 in case there
	 * is a world switch during SMC handling.
	 * -----------------------------------------------------
	 * NOTE(review): the 'mrs' reads filling x16-x18 are not visible in this
	 * excerpt; the stores below assume x16=SPSR_EL3, x17=ELR_EL3,
	 * x18=SCR_EL3 — confirm against the full file. */
	stp	x16, x17, [x6, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	str	x18, [x6, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/* Copy SCR_EL3.NS bit to the flag to indicate caller's security */

	/* -----------------------------------------------------
	 * Go to BL1 SMC handler.
	 * ----------------------------------------------------- */

	/* -----------------------------------------------------
	 * Do the transition to next BL image.
	 * ----------------------------------------------------- */