/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers from C code
 *********************************************************************/
/*
 * Define an inline function write_<_name>() that writes a 32-bit value to
 * the coprocessor register identified by (coproc, opc1, CRn, CRm, opc2)
 * using the MCR instruction.
 */
#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline void write_## _name(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
/*
 * Define an inline function read_<_name>() that reads the coprocessor
 * register identified by (coproc, opc1, CRn, CRm, opc2) using MRC and
 * returns its 32-bit value.
 */
#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v; \
}
/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the 64 bit `mrrc` and `mcrr` instructions below.
 */
/*
 * Define an inline function write64_<_name>() that writes a 64-bit value
 * to the coprocessor register pair identified by (coproc, opc1, CRm) using
 * MCRR. %Q0/%R0 select the low/high halves of the 64-bit operand.
 */
#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm) \
static inline void write64_## _name(uint64_t v) \
{ \
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}
/*
 * Define an inline function read64_<_name>() that reads the 64-bit
 * coprocessor register pair identified by (coproc, opc1, CRm) using MRRC.
 */
#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm) \
static inline uint64_t read64_## _name(void) \
{ \
	uint64_t v; \
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v; \
}
/* Define an inline function read_<_name>() that reads a banked/special
 * register (e.g. CPSR, SPSR) via the MRS instruction. */
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v)); \
	return v; \
}
/* Define an inline function write_<_name>() that writes a banked/special
 * register via the MSR instruction, taking the value in a register. */
#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name) \
static inline void write_ ## _name(u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v)); \
}
/* As _DEFINE_SYSREG_WRITE_FUNC, but the value must be a compile-time
 * constant (the "i" constraint encodes it as an MSR immediate). */
#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name) \
static inline void write_ ## _name(const u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v)); \
}
/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _name) \
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)
/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/
/* Define an inline function tlbi<_op>() performing a TLB invalidate
 * operation that takes no argument (the MCR source value is ignored
 * by the operation, so zero is written). */
#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
/* Define an inline function bpi<_op>() performing a branch predictor
 * invalidate operation that takes no argument. */
#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void bpi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
/* Define an inline function tlbi<_op>() performing a TLB invalidate
 * operation that takes one register argument (e.g. a virtual address). */
#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...) \
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...) \
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...) \
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)
/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/
/* Define an inline function dc<_op>() performing a data cache maintenance
 * operation that takes one register argument (a virtual address). */
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void dc##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...) \
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/* Define function for simple system instruction (e.g. wfi, isb) */
#define DEFINE_SYSOP_FUNC(_op) \
static inline void _op(void) \
{ \
	__asm__ (#_op); \
}
/* Define function for system instruction with type specifier
 * (e.g. dsb sy -> dsbsy()) */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type) \
static inline void _op ## _type(void) \
{ \
	__asm__ (#_op " " #_type); \
}
/* Define function for system instruction with type specifier and
 * one register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type) \
static inline void _op ## _type(u_register_t v) \
{ \
	__asm__ (#_op " " #_type ", %0" : : "r" (v)); \
}
178 void flush_dcache_range(uintptr_t addr
, size_t size
);
179 void clean_dcache_range(uintptr_t addr
, size_t size
);
180 void inv_dcache_range(uintptr_t addr
, size_t size
);
182 void dcsw_op_louis(u_register_t op_type
);
183 void dcsw_op_all(u_register_t op_type
);
185 void disable_mmu_secure(void);
186 void disable_mmu_icache_secure(void);
188 DEFINE_SYSOP_FUNC(wfi
)
189 DEFINE_SYSOP_FUNC(wfe
)
190 DEFINE_SYSOP_FUNC(sev
)
191 DEFINE_SYSOP_TYPE_FUNC(dsb
, sy
)
192 DEFINE_SYSOP_TYPE_FUNC(dmb
, sy
)
193 DEFINE_SYSOP_TYPE_FUNC(dmb
, st
)
195 /* dmb ld is not valid for armv7/thumb machines */
196 #if ARM_ARCH_MAJOR != 7
197 DEFINE_SYSOP_TYPE_FUNC(dmb
, ld
)
200 DEFINE_SYSOP_TYPE_FUNC(dsb
, ish
)
201 DEFINE_SYSOP_TYPE_FUNC(dsb
, ishst
)
202 DEFINE_SYSOP_TYPE_FUNC(dmb
, ish
)
203 DEFINE_SYSOP_TYPE_FUNC(dmb
, ishst
)
204 DEFINE_SYSOP_FUNC(isb
)
206 void __dead2
smc(uint32_t r0
, uint32_t r1
, uint32_t r2
, uint32_t r3
,
207 uint32_t r4
, uint32_t r5
, uint32_t r6
, uint32_t r7
);
209 DEFINE_SYSREG_RW_FUNCS(spsr
)
210 DEFINE_SYSREG_RW_FUNCS(cpsr
)
/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
215 DEFINE_COPROCR_READ_FUNC(mpidr
, MPIDR
)
216 DEFINE_COPROCR_READ_FUNC(midr
, MIDR
)
217 DEFINE_COPROCR_READ_FUNC(id_pfr0
, ID_PFR0
)
218 DEFINE_COPROCR_READ_FUNC(id_pfr1
, ID_PFR1
)
219 DEFINE_COPROCR_READ_FUNC(isr
, ISR
)
220 DEFINE_COPROCR_READ_FUNC(clidr
, CLIDR
)
221 DEFINE_COPROCR_READ_FUNC_64(cntpct
, CNTPCT_64
)
223 DEFINE_COPROCR_RW_FUNCS(scr
, SCR
)
224 DEFINE_COPROCR_RW_FUNCS(ctr
, CTR
)
225 DEFINE_COPROCR_RW_FUNCS(sctlr
, SCTLR
)
226 DEFINE_COPROCR_RW_FUNCS(actlr
, ACTLR
)
227 DEFINE_COPROCR_RW_FUNCS(hsctlr
, HSCTLR
)
228 DEFINE_COPROCR_RW_FUNCS(hcr
, HCR
)
229 DEFINE_COPROCR_RW_FUNCS(hcptr
, HCPTR
)
230 DEFINE_COPROCR_RW_FUNCS(cntfrq
, CNTFRQ
)
231 DEFINE_COPROCR_RW_FUNCS(cnthctl
, CNTHCTL
)
232 DEFINE_COPROCR_RW_FUNCS(mair0
, MAIR0
)
233 DEFINE_COPROCR_RW_FUNCS(mair1
, MAIR1
)
234 DEFINE_COPROCR_RW_FUNCS(hmair0
, HMAIR0
)
235 DEFINE_COPROCR_RW_FUNCS(ttbcr
, TTBCR
)
236 DEFINE_COPROCR_RW_FUNCS(htcr
, HTCR
)
237 DEFINE_COPROCR_RW_FUNCS(ttbr0
, TTBR0
)
238 DEFINE_COPROCR_RW_FUNCS_64(ttbr0
, TTBR0_64
)
239 DEFINE_COPROCR_RW_FUNCS(ttbr1
, TTBR1
)
240 DEFINE_COPROCR_RW_FUNCS_64(httbr
, HTTBR_64
)
241 DEFINE_COPROCR_RW_FUNCS(vpidr
, VPIDR
)
242 DEFINE_COPROCR_RW_FUNCS(vmpidr
, VMPIDR
)
243 DEFINE_COPROCR_RW_FUNCS_64(vttbr
, VTTBR_64
)
244 DEFINE_COPROCR_RW_FUNCS_64(ttbr1
, TTBR1_64
)
245 DEFINE_COPROCR_RW_FUNCS_64(cntvoff
, CNTVOFF_64
)
246 DEFINE_COPROCR_RW_FUNCS(csselr
, CSSELR
)
247 DEFINE_COPROCR_RW_FUNCS(hstr
, HSTR
)
248 DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2
, CNTHP_CTL
)
249 DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2
, CNTHP_TVAL
)
250 DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2
, CNTHP_CVAL_64
)
/* Field extract/set/clear helpers for the CNTP_CTL timer control register */
#define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
					CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					CNTP_CTL_ISTATUS_MASK)

#define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

#define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
265 DEFINE_COPROCR_RW_FUNCS(icc_sre_el1
, ICC_SRE
)
266 DEFINE_COPROCR_RW_FUNCS(icc_sre_el2
, ICC_HSRE
)
267 DEFINE_COPROCR_RW_FUNCS(icc_sre_el3
, ICC_MSRE
)
268 DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1
, ICC_PMR
)
269 DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1
, ICC_RPR
)
270 DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3
, ICC_MGRPEN1
)
271 DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1
, ICC_IGRPEN1
)
272 DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1
, ICC_IGRPEN0
)
273 DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1
, ICC_HPPIR0
)
274 DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1
, ICC_HPPIR1
)
275 DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1
, ICC_IAR0
)
276 DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1
, ICC_IAR1
)
277 DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1
, ICC_EOIR0
)
278 DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1
, ICC_EOIR1
)
279 DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1
, ICC_SGI0R_EL1_64
)
280 DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r
, ICC_SGI1R_EL1_64
)
282 DEFINE_COPROCR_RW_FUNCS(hdcr
, HDCR
)
283 DEFINE_COPROCR_RW_FUNCS(cnthp_ctl
, CNTHP_CTL
)
284 DEFINE_COPROCR_READ_FUNC(pmcr
, PMCR
)
287 * Address translation
289 DEFINE_COPROCR_WRITE_FUNC(ats1cpr
, ATS1CPR
)
290 DEFINE_COPROCR_WRITE_FUNC(ats1hr
, ATS1HR
)
291 DEFINE_COPROCR_RW_FUNCS_64(par
, PAR_64
)
293 DEFINE_COPROCR_RW_FUNCS(nsacr
, NSACR
)
295 /* AArch32 coproc registers for 32bit MMU descriptor support */
296 DEFINE_COPROCR_RW_FUNCS(prrr
, PRRR
)
297 DEFINE_COPROCR_RW_FUNCS(nmrr
, NMRR
)
298 DEFINE_COPROCR_RW_FUNCS(dacr
, DACR
)
300 DEFINE_COPROCR_RW_FUNCS(amcntenset0
, AMCNTENSET0
)
301 DEFINE_COPROCR_RW_FUNCS(amcntenset1
, AMCNTENSET1
)
302 DEFINE_COPROCR_RW_FUNCS(amcntenclr0
, AMCNTENCLR0
)
303 DEFINE_COPROCR_RW_FUNCS(amcntenclr1
, AMCNTENCLR1
)
305 DEFINE_COPROCR_RW_FUNCS_64(amevcntr00
, AMEVCNTR00
)
306 DEFINE_COPROCR_RW_FUNCS_64(amevcntr01
, AMEVCNTR01
)
307 DEFINE_COPROCR_RW_FUNCS_64(amevcntr02
, AMEVCNTR02
)
308 DEFINE_COPROCR_RW_FUNCS_64(amevcntr03
, AMEVCNTR03
)
311 * TLBI operation prototypes
313 DEFINE_TLBIOP_FUNC(all
, TLBIALL
)
314 DEFINE_TLBIOP_FUNC(allis
, TLBIALLIS
)
315 DEFINE_TLBIOP_PARAM_FUNC(mva
, TLBIMVA
)
316 DEFINE_TLBIOP_PARAM_FUNC(mvaa
, TLBIMVAA
)
317 DEFINE_TLBIOP_PARAM_FUNC(mvaais
, TLBIMVAAIS
)
318 DEFINE_TLBIOP_PARAM_FUNC(mvahis
, TLBIMVAHIS
)
321 * BPI operation prototypes.
323 DEFINE_BPIOP_FUNC(allis
, BPIALLIS
)
326 * DC operation prototypes
328 DEFINE_DCOP_PARAM_FUNC(civac
, DCCIMVAC
)
329 DEFINE_DCOP_PARAM_FUNC(ivac
, DCIMVAC
)
330 DEFINE_DCOP_PARAM_FUNC(cvac
, DCCMVAC
)
/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define dmbld()			dmb()
#endif
/* True when SCR.NS is clear, i.e. the CPU is in the Secure state */
#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

/* Processor-mode predicates derived from the CPSR mode field */
#define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()	IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) || \
		(IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))
/*
 * Return the exception level (1, 2 or 3) the CPU is currently executing
 * at, inferred from the CPSR mode bits and SCR.NS (AArch32 has no direct
 * CurrentEL register).
 */
static inline unsigned int get_current_el(void)
{
	if (IS_IN_EL3()) {
		return 3U;
	} else if (IS_IN_EL2()) {
		return 2U;
	} else {
		return 1U;
	}
}
/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)

#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()
/* Helper functions to manipulate CPSR */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier will prevent the compiler from
	 * scheduling non-volatile memory access after the write to the
	 * register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, in the assumption
	 * that it is safe as the interrupts are disabled at the time it does
	 * that (according to program order). However, non-volatile accesses
	 * are not necessarily in program order relatively with volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie i");
	isb();
}
/* Unmask asynchronous aborts (SError); see the ordering note in enable_irq() */
static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie a");
	isb();
}
/* Unmask FIQs */
static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie f");
	isb();
}
/* Mask IRQs; the barrier keeps preceding memory accesses before the mask */
static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid i");
	isb();
}
/* Mask asynchronous aborts (SError) */
static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid a");
	isb();
}
443 static inline void disable_fiq(void)
446 __asm__
volatile ("cpsid f");
450 #endif /* ARCH_HELPERS_H */