/*
 * Origin: plat/bcm/aarch32/pl310_a32.S
 * (project/bcm63xx/atf.git, commit 4787490ba2034bc183b2caab0070f55da69bb0fe)
 */
/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
28 #include <asm_macros.S>
29 #include <platform_def.h>
30 #include <cpu_macros.S>
31
32 #define PL310_LOCKDOWN_NBREGS 8
33 #define PL310_LOCKDOWN_SZREG 4
34
35 #define PL310_8WAYS_MASK 0x00FF
36 #define PL310_16WAYS_UPPERMASK 0xFF00
37
38 .globl arm_cl2_lockallways
39 .globl arm_cl2_cleaninvbyway
40 .globl arm_cl2_invbyway
41 .globl arm_cl2_cleanbyway
42 .globl arm_cl2_cleanbypa
43 .globl arm_cl2_invbypa
44 .globl arm_cl2_cleaninvbypa
45 /*
46 * void arm_cl2_lockallways(vaddr_t base)
47 *
48 * lock all L2 caches ways for data and instruction
49 */
50 func arm_cl2_lockallways
51 add r1, r0, #PL310_DCACHE_LOCKDOWN_BASE
52 ldr r2, [r0, #PL310_AUX_CTRL]
53 tst r2, #PL310_AUX_16WAY_BIT
54 mov r2, #PL310_8WAYS_MASK
55 orrne r2, #PL310_16WAYS_UPPERMASK
56 mov r0, #PL310_LOCKDOWN_NBREGS
57 1: /* lock Dcache and Icache */
58 str r2, [r1], #PL310_LOCKDOWN_SZREG
59 str r2, [r1], #PL310_LOCKDOWN_SZREG
60 subs r0, r0, #1
61 bne 1b
62
63 mov pc, lr
64 endfunc arm_cl2_lockallways
65
66 /*
67 * Set sync operation mask according to ways associativity.
68 * Preserve r0 = pl310 iomem base address
69 */
70 .macro syncbyway_set_mask reg
71 ldr \reg, [r0, #PL310_AUX_CTRL]
72 tst \reg, #PL310_AUX_16WAY_BIT
73 mov \reg, #PL310_8WAYS_MASK
74 orrne \reg, \reg, #PL310_16WAYS_UPPERMASK
75 .endm
76
77 /*
78 * void arm_cl2_cleaninvbyway(vaddr_t base)
79 * clean & invalidate the whole L2 cache.
80 */
81 func arm_cl2_cleaninvbyway
82
83 syncbyway_set_mask r1
84 str r1, [r0, #PL310_FLUSH_BY_WAY]
85
86 /* Wait for all cache ways to be cleaned and invalidated */
87 loop_cli_way_done:
88 ldr r2, [r0, #PL310_FLUSH_BY_WAY]
89 and r2, r2, r1
90 cmp r2, #0
91 bne loop_cli_way_done
92
93 /* Cache Sync */
94
95 /*
96 * Wait for writing cache sync
97 * To PL310, Cache sync is atomic opertion, no need to check
98 * the status. For PL220, this check is needed. Keeping the loop
99 * for PL310 is no harm for PL310.
100 */
101 loop_cli_sync:
102 ldr r1, [r0, #PL310_SYNC]
103 cmp r1, #0
104 bne loop_cli_sync
105
106 mov r1, #0
107 str r1, [r0, #PL310_SYNC]
108
109 loop_cli_sync_done:
110 ldr r1, [r0, #PL310_SYNC]
111 cmp r1, #0
112 bne loop_cli_sync_done
113
114 mov pc, lr
115 endfunc arm_cl2_cleaninvbyway
116
117 /* void arm_cl2_invbyway(vaddr_t base) */
118 func arm_cl2_invbyway
119
120 syncbyway_set_mask r1
121 str r1, [r0, #PL310_INV_BY_WAY]
122
123 loop_inv_way_done:
124 ldr r2, [r0, #PL310_INV_BY_WAY]
125 and r2, r2, r1
126 cmp r2, #0
127 bne loop_inv_way_done
128
129 loop_inv_way_sync:
130 ldr r1, [r0, #PL310_SYNC]
131 cmp r1, #0
132 bne loop_inv_way_sync
133
134 mov r1, #0
135 str r1, [r0, #PL310_SYNC]
136
137 loop_inv_way_sync_done:
138 ldr r1, [r0, #PL310_SYNC]
139 cmp r1, #0
140 bne loop_inv_way_sync_done
141
142 mov pc, lr
143 endfunc arm_cl2_invbyway
144
145 /* void arm_cl2_cleanbyway(vaddr_t base) */
146 func arm_cl2_cleanbyway
147
148 syncbyway_set_mask r1
149 str r1, [r0, #PL310_CLEAN_BY_WAY]
150
151 loop_cl_way_done:
152 ldr r2, [r0, #PL310_CLEAN_BY_WAY]
153 and r2, r2, r1
154 cmp r2, #0
155 bne loop_cl_way_done
156
157 loop_cl_way_sync:
158 ldr r1, [r0, #PL310_SYNC]
159 cmp r1, #0
160 bne loop_cl_way_sync
161
162 mov r1, #0
163 str r1, [r0, #PL310_SYNC]
164
165 loop_cl_way_sync_done:
166 ldr r1, [r0, #PL310_SYNC]
167 cmp r1, #0
168 bne loop_cl_way_sync_done
169
170 mov pc, lr
171 endfunc arm_cl2_cleanbyway
172
173 /*
174 * void _arm_cl2_xxxbypa(vaddr_t pl310_base, paddr_t start, paddr_t end,
175 * int pl310value);
176 * pl310value is one of PL310_CLEAN_BY_PA, PL310_INV_BY_PA or PL310_FLUSH_BY_PA
177 */
178 func _arm_cl2_xxxbypa
179 /* Align start address on PL310 line size */
180 and r1, #(~(PL310_LINE_SIZE - 1))
181 #ifdef SCU_BASE
182 /*
183 * ARM ERRATA #764369
184 * Undocummented SCU Diagnostic Control Register
185 */
186 /*
187 * NOTE:
188 * We're assuming that if mmu is enabled PL310_BASE and SCU_BASE
189 * still have the same relative offsets from each other.
190 */
191 sub r0, r0, #(PL310_BASE - SCU_BASE)
192 mov r12, #1
193 str r12, [r0, #SCU_ERRATA744369]
194 dsb
195 add r0, r0, #(PL310_BASE - SCU_BASE)
196 #endif
197 loop_cl2_xxxbypa:
198 str r1, [r0, r3]
199
200 loop_xxx_pa_done:
201 ldr r12, [r0, r3]
202 and r12, r12, r1
203 cmp r12, #0
204 bne loop_xxx_pa_done
205
206 add r1, r1, #PL310_LINE_SIZE
207 cmp r2, r1
208 bpl loop_cl2_xxxbypa
209
210 loop_xxx_pa_sync:
211 ldr r12, [r0, #PL310_SYNC]
212 cmp r12, #0
213 bne loop_xxx_pa_sync
214
215 mov r12, #0
216 str r12, [r0, #PL310_SYNC]
217
218 loop_xxx_pa_sync_done:
219 ldr r12, [r0, #PL310_SYNC]
220 cmp r12, #0
221 bne loop_xxx_pa_sync_done
222
223 mov pc, lr
224 endfunc _arm_cl2_xxxbypa
225
226 /*
227 * void _arm_cl2_cleanbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
228 * clean L2 cache by physical address range.
229 */
230 func arm_cl2_cleanbypa
231 mov r3, #PL310_CLEAN_BY_PA
232 b _arm_cl2_xxxbypa
233 endfunc arm_cl2_cleanbypa
234
235 /*
236 * void arm_cl2_invbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
237 * invalidate L2 cache by physical address range.
238 */
239 func arm_cl2_invbypa
240 mov r3, #PL310_INV_BY_PA
241 b _arm_cl2_xxxbypa
242 endfunc arm_cl2_invbypa
243
244 /*
245 * void arm_cl2_cleaninvbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
246 * clean and invalidate L2 cache by physical address range.
247 */
248 func arm_cl2_cleaninvbypa
249 mov r3, #PL310_FLUSH_BY_PA
250 b _arm_cl2_xxxbypa
251 endfunc arm_cl2_cleaninvbypa
252