[project/bcm63xx/atf.git] include/arch/aarch32/arch_helpers.h
/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_HELPERS_H
#define ARCH_HELPERS_H

#include <cdefs.h>
#include <stdint.h>
#include <string.h>

#include <arch.h>

/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline void write_## _name(u_register_t v) \
{ \
        __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline u_register_t read_ ## _name(void) \
{ \
        u_register_t v; \
        __asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
        return v; \
}

/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the 64-bit `mrrc` and `mcrr` instructions below.
 */

#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm) \
static inline void write64_## _name(uint64_t v) \
{ \
        __asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm) \
static inline uint64_t read64_## _name(void) \
{ uint64_t v; \
        __asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
        return v; \
}
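
/*
 * Illustrative expansion (an assumption about the encoding, not taken from
 * this file): if CNTPCT_64 expands to p15, 0, c14 in arch.h, then
 * DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64), used further down, defines
 * read64_cntpct(), which issues "mrrc p15, 0, %Q0, %R0, c14" and returns the
 * 64-bit counter value.
 */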

#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
static inline u_register_t read_ ## _name(void) \
{ \
        u_register_t v; \
        __asm__ volatile ("mrs %0, " #_reg_name : "=r" (v)); \
        return v; \
}

#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name) \
static inline void write_ ## _name(u_register_t v) \
{ \
        __asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v)); \
}

#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name) \
static inline void write_ ## _name(const u_register_t v) \
{ \
        __asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v)); \
}

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) \
        _DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) \
        _DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) \
        _DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__) \
        _DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) \
        _DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) \
        _DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) \
        _DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__) \
        _DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name) \
        _DEFINE_SYSREG_READ_FUNC(_name, _name) \
        _DEFINE_SYSREG_WRITE_FUNC(_name, _name)
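
/*
 * Illustrative expansion (assuming the usual SCR encoding, p15, 0, c1, c1, 0,
 * from arch.h): DEFINE_COPROCR_RW_FUNCS(scr, SCR), used further down, defines
 * read_scr() and write_scr() around "mrc/mcr p15, 0, %0, c1, c1, 0", while
 * DEFINE_SYSREG_RW_FUNCS(cpsr) defines read_cpsr()/write_cpsr() using the
 * "mrs"/"msr" forms above.
 */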

/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/

#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(void) \
{ \
        u_register_t v = 0; \
        __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void bpi##_op(void) \
{ \
        u_register_t v = 0; \
        __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(u_register_t v) \
{ \
        __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...) \
        _DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...) \
        _DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...) \
        _DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void dc##_op(u_register_t v) \
{ \
        __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...) \
        _DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
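
/*
 * Illustrative use: with the DC prototypes further down, dccivac(addr)
 * cleans and invalidates the data cache line containing the virtual address
 * `addr` to the point of coherency, dcivac(addr) invalidates it and
 * dccvac(addr) cleans it.
 */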

/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/* Define function for simple system instruction */
#define DEFINE_SYSOP_FUNC(_op) \
static inline void _op(void) \
{ \
        __asm__ (#_op); \
}


/* Define function for system instruction with type specifier */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type) \
static inline void _op ## _type(void) \
{ \
        __asm__ (#_op " " #_type); \
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type) \
static inline void _op ## _type(u_register_t v) \
{ \
        __asm__ (#_op " " #_type ", %0" : : "r" (v)); \
}
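
/*
 * Illustrative expansion: DEFINE_SYSOP_TYPE_FUNC(dsb, ish) below defines
 * dsbish(), which emits a "dsb ish" instruction, and DEFINE_SYSOP_FUNC(isb)
 * defines isb().
 */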

void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);

void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);
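
/*
 * Note (informal): the range helpers above operate by virtual address -
 * flush_dcache_range() cleans and invalidates, clean_dcache_range() only
 * cleans and inv_dcache_range() only invalidates the [addr, addr + size)
 * region. The set/way helpers take the operation type as their argument,
 * i.e. one of the data cache set/way opcodes defined in arch.h.
 */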

void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);

DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_FUNC(isb)

void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
                 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);

DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)

/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)

#define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
                                 CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
                                 CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
                                 CNTP_CTL_ISTATUS_MASK)

#define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

#define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
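
/*
 * Illustrative use (a sketch, not taken from this file): enable the EL2
 * physical timer with its interrupt left unmasked:
 *
 *   u_register_t ctl = 0U;
 *   set_cntp_ctl_enable(ctl);
 *   write_cnthp_ctl(ctl);
 */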

DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)

DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)

/*
 * Address translation
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)
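
/*
 * Illustrative VA-to-PA lookup using the accessors above (a minimal sketch;
 * checking the fault bit in PAR is omitted):
 *
 *   write_ats1cpr(va);          // stage 1 current-state PL1 read translation
 *   isb();                      // ensure PAR is updated before reading it
 *   par = read64_par();         // 64-bit PAR holds the PA and attributes
 */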

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)

/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)
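
/*
 * Typical usage (illustrative): after changing translation tables,
 * invalidate the TLBs for the Inner Shareable domain and synchronise
 * before relying on the new mappings:
 *
 *   tlbiallis();
 *   dsbish();
 *   isb();
 */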

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)

/* Previously defined accessor functions with incomplete register names */
#define dsb() dsbsy()
#define dmb() dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define dmbld() dmb()
#endif

#define IS_IN_SECURE() \
        (GET_NS_BIT(read_scr()) == 0)

#define IS_IN_HYP() (GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC() (GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON() (GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2() IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
        ((GET_M32(read_cpsr()) == MODE32_mon) || \
         (IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))

static inline unsigned int get_current_el(void)
{
        if (IS_IN_EL3()) {
                return 3U;
        } else if (IS_IN_EL2()) {
                return 2U;
        } else {
                return 1U;
        }
}

/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1() read_mpidr()

#define read_scr_el3() read_scr()
#define write_scr_el3(_v) write_scr(_v)

#define read_hcr_el2() read_hcr()
#define write_hcr_el2(_v) write_hcr(_v)

#define read_cpacr_el1() read_cpacr()
#define write_cpacr_el1(_v) write_cpacr(_v)

#define read_cntfrq_el0() read_cntfrq()
#define write_cntfrq_el0(_v) write_cntfrq(_v)
#define read_isr_el1() read_isr()

#define read_cntpct_el0() read64_cntpct()

#define read_ctr_el0() read_ctr()

#define write_icc_sgi0r_el1(_v) write64_icc_sgi0r_el1(_v)

#define read_daif() read_cpsr()
#define write_daif(flags) write_cpsr(flags)

#define read_cnthp_cval_el2() read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v) write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0() read_amcntenset0()
#define read_amcntenset1_el0() read_amcntenset1()

/* Helper functions to manipulate CPSR */
static inline void enable_irq(void)
{
        /*
         * The compiler memory barrier will prevent the compiler from
         * scheduling non-volatile memory accesses after the write to the
         * register.
         *
         * This could happen if some initialization code issues non-volatile
         * accesses to an area used by an interrupt handler, on the assumption
         * that it is safe because interrupts are disabled at the time it does
         * so (according to program order). However, non-volatile accesses are
         * not necessarily kept in program order relative to volatile inline
         * assembly statements (and volatile accesses).
         */
        COMPILER_BARRIER();
        __asm__ volatile ("cpsie i");
        isb();
}

static inline void enable_serror(void)
{
        COMPILER_BARRIER();
        __asm__ volatile ("cpsie a");
        isb();
}

static inline void enable_fiq(void)
{
        COMPILER_BARRIER();
        __asm__ volatile ("cpsie f");
        isb();
}

static inline void disable_irq(void)
{
        COMPILER_BARRIER();
        __asm__ volatile ("cpsid i");
        isb();
}

static inline void disable_serror(void)
{
        COMPILER_BARRIER();
        __asm__ volatile ("cpsid a");
        isb();
}

static inline void disable_fiq(void)
{
        COMPILER_BARRIER();
        __asm__ volatile ("cpsid f");
        isb();
}

#endif /* ARCH_HELPERS_H */