1 /* SPDX-License-Identifier: BSD-2-Clause */
3 * Copyright (c) 2014, STMicroelectronics International N.V.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions are met:
9 * 1. Redistributions of source code must retain the above copyright notice,
10 * this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright notice,
13 * this list of conditions and the following disclaimer in the documentation
14 * and/or other materials provided with the distribution.
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
20 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26 * POSSIBILITY OF SUCH DAMAGE.
28 #include <asm_macros.S>
29 #include <platform_def.h>
30 #include <cpu_macros.S>
32 #define PL310_LOCKDOWN_NBREGS 8
33 #define PL310_LOCKDOWN_SZREG 4
35 #define PL310_8WAYS_MASK 0x00FF
36 #define PL310_16WAYS_UPPERMASK 0xFF00
38 .globl arm_cl2_lockallways
39 .globl arm_cl2_cleaninvbyway
40 .globl arm_cl2_invbyway
41 .globl arm_cl2_cleanbyway
42 .globl arm_cl2_cleanbypa
43 .globl arm_cl2_invbypa
44 .globl arm_cl2_cleaninvbypa
/*
 * void arm_cl2_lockallways(vaddr_t base)
 *
 * Lock all L2 cache ways for both data and instruction so that no new
 * lines are allocated into L2.  r0 = pl310 register base address.
 */
50 func arm_cl2_lockallways
51 add r1, r0, #PL310_DCACHE_LOCKDOWN_BASE /* r1 = first D-lockdown register */
52 ldr r2, [r0, #PL310_AUX_CTRL]
53 tst r2, #PL310_AUX_16WAY_BIT /* 16-way associative controller? */
54 mov r2, #PL310_8WAYS_MASK /* lock mask covering ways 0-7 */
55 orrne r2, #PL310_16WAYS_UPPERMASK /* also cover ways 8-15 if 16-way */
56 mov r0, #PL310_LOCKDOWN_NBREGS /* loop count: one D/I register pair each */
57 1: /* lock Dcache and Icache */
58 str r2, [r1], #PL310_LOCKDOWN_SZREG
59 str r2, [r1], #PL310_LOCKDOWN_SZREG
/*
 * NOTE(review): the loop-counter decrement/branch back to 1b (and any
 * final sync) are not visible in this view of the file — confirm
 * against the original source.
 */
64 endfunc arm_cl2_lockallways
/*
 * Set sync operation mask according to ways associativity.
 * Preserve r0 = pl310 iomem base address.
 */
70 .macro syncbyway_set_mask reg
71 ldr \reg, [r0, #PL310_AUX_CTRL]
72 tst \reg, #PL310_AUX_16WAY_BIT /* 16-way associative controller? */
73 mov \reg, #PL310_8WAYS_MASK /* mask covering ways 0-7 */
74 orrne \reg, \reg, #PL310_16WAYS_UPPERMASK /* add ways 8-15 if 16-way */
/* NOTE(review): the closing .endm is not visible in this view — confirm. */
/*
 * void arm_cl2_cleaninvbyway(vaddr_t base)
 *
 * Clean & invalidate the whole L2 cache, by way.  r0 = pl310 base.
 */
81 func arm_cl2_cleaninvbyway
84 str r1, [r0, #PL310_FLUSH_BY_WAY] /* start clean+inv of all masked ways */
86 /* Wait for all cache ways to be cleaned and invalidated */
88 ldr r2, [r0, #PL310_FLUSH_BY_WAY]
/*
 * Wait for writing cache sync.
 * For the PL310, cache sync is an atomic operation, so there is no need
 * to check the status.  For the PL220 this check is needed.  Keeping
 * the loop does no harm for the PL310.
 */
102 ldr r1, [r0, #PL310_SYNC]
107 str r1, [r0, #PL310_SYNC] /* issue a cache sync */
110 ldr r1, [r0, #PL310_SYNC]
112 bne loop_cli_sync_done
/*
 * NOTE(review): the way-mask setup, the compare/test instructions that
 * set the flags for the bne above, and several loop labels are missing
 * from this view — confirm against the original source.
 */
115 endfunc arm_cl2_cleaninvbyway
/* void arm_cl2_invbyway(vaddr_t base) -- invalidate whole L2 cache by way */
118 func arm_cl2_invbyway
120 syncbyway_set_mask r1 /* r1 = way mask (8 or 16 ways) */
121 str r1, [r0, #PL310_INV_BY_WAY] /* start invalidate of all masked ways */
124 ldr r2, [r0, #PL310_INV_BY_WAY]
127 bne loop_inv_way_done
130 ldr r1, [r0, #PL310_SYNC]
132 bne loop_inv_way_sync
135 str r1, [r0, #PL310_SYNC] /* issue a cache sync */
137 loop_inv_way_sync_done:
138 ldr r1, [r0, #PL310_SYNC]
140 bne loop_inv_way_sync_done
/*
 * NOTE(review): the compare/test instructions feeding the bne's above
 * and the loop_inv_way_done / loop_inv_way_sync labels are not visible
 * in this view — confirm against the original source.
 */
143 endfunc arm_cl2_invbyway
/* void arm_cl2_cleanbyway(vaddr_t base) -- clean whole L2 cache by way */
146 func arm_cl2_cleanbyway
148 syncbyway_set_mask r1 /* r1 = way mask (8 or 16 ways) */
149 str r1, [r0, #PL310_CLEAN_BY_WAY] /* start clean of all masked ways */
152 ldr r2, [r0, #PL310_CLEAN_BY_WAY]
158 ldr r1, [r0, #PL310_SYNC]
163 str r1, [r0, #PL310_SYNC] /* issue a cache sync */
165 loop_cl_way_sync_done:
166 ldr r1, [r0, #PL310_SYNC]
168 bne loop_cl_way_sync_done
/*
 * NOTE(review): the compare/test instructions and several loop labels
 * are missing from this view — confirm against the original source.
 */
171 endfunc arm_cl2_cleanbyway
/*
 * void _arm_cl2_xxxbypa(vaddr_t pl310_base, paddr_t start, paddr_t end,
 *	...) -- NOTE(review): trailing parameter line is missing from
 * this view; presumably the PL310 operation register offset in r3
 * (see the callers below) — confirm against the original source.
 *
 * pl310value is one of PL310_CLEAN_BY_PA, PL310_INV_BY_PA or PL310_FLUSH_BY_PA
 */
178 func _arm_cl2_xxxbypa
179 /* Align start address on PL310 line size */
180 and r1, #(~(PL310_LINE_SIZE - 1))
/*
 * Undocumented SCU Diagnostic Control Register (used below through the
 * SCU_ERRATA744369 offset — presumably the ARM erratum 744369
 * workaround; confirm).
 * We're assuming that if the MMU is enabled PL310_BASE and SCU_BASE
 * still have the same relative offsets from each other.
 */
191 sub r0, r0, #(PL310_BASE - SCU_BASE) /* r0 = SCU iomem base */
193 str r12, [r0, #SCU_ERRATA744369]
195 add r0, r0, #(PL310_BASE - SCU_BASE) /* restore r0 = PL310 base */
206 add r1, r1, #PL310_LINE_SIZE /* advance to next cache line */
211 ldr r12, [r0, #PL310_SYNC]
216 str r12, [r0, #PL310_SYNC] /* issue a cache sync */
218 loop_xxx_pa_sync_done:
219 ldr r12, [r0, #PL310_SYNC]
221 bne loop_xxx_pa_sync_done
/*
 * NOTE(review): the per-line operation loop, the compare/test
 * instructions feeding the bne above, and several labels are missing
 * from this view — confirm against the original source.
 */
224 endfunc _arm_cl2_xxxbypa
/*
 * void arm_cl2_cleanbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
 *
 * Clean L2 cache by physical address range.
 */
230 func arm_cl2_cleanbypa
231 mov r3, #PL310_CLEAN_BY_PA /* select the clean-by-PA operation */
/*
 * NOTE(review): the tail branch to _arm_cl2_xxxbypa is not visible in
 * this view — confirm against the original source.
 */
233 endfunc arm_cl2_cleanbypa
/*
 * void arm_cl2_invbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
 *
 * Invalidate L2 cache by physical address range.
 */
/*
 * NOTE(review): the "func arm_cl2_invbypa" line and the tail branch to
 * _arm_cl2_xxxbypa are not visible in this view — confirm against the
 * original source.
 */
240 mov r3, #PL310_INV_BY_PA /* select the invalidate-by-PA operation */
242 endfunc arm_cl2_invbypa
/*
 * void arm_cl2_cleaninvbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
 *
 * Clean and invalidate L2 cache by physical address range.
 */
248 func arm_cl2_cleaninvbypa
249 mov r3, #PL310_FLUSH_BY_PA /* select the clean+invalidate-by-PA operation */
/*
 * NOTE(review): the tail branch to _arm_cl2_xxxbypa is not visible in
 * this view — confirm against the original source.
 */
251 endfunc arm_cl2_cleaninvbypa