/*-
2 * Copyright 2014 Svatopluk Kraus <onwahe@gmail.com>
3 * Copyright 2014 Michal Meloun <meloun@miracle.cz>
4 * All rights reserved.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 *
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
19 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
25 * SUCH DAMAGE.
26 *
27 * $FreeBSD: releng/10.2/sys/arm/arm/cpu_asm-v6.S 278645 2015-02-13 00:12:21Z ian $
28 */

#include <machine/acle-compat.h>
#include <machine/asm.h>
#include <machine/asmacros.h>
#include <machine/armreg.h>
#include <machine/sysreg.h>

/*
 * Define cache functions used by startup code, which counts on the fact that
 * only r0-r3,r12 (ip) are modified and no stack space is used. These functions
 * must be called with interrupts disabled. Moreover, these work only with
 * caches integrated to CPU (accessible via CP15); systems with an external L2
 * cache controller such as a PL310 need separate calls to that device driver
 * to affect L2 caches. This is not a factor during early kernel startup, as
 * any external L2 cache controller has not been enabled yet.
 */

/*
 * void dcache_inv_poc_all(void)
 *
 * Invalidate (no write-back) the entire D-cache to the Point of Coherency,
 * i.e. every cache level reported by CLIDR, walking levels outermost to
 * innermost by set/way. Clobbers only r0-r3 and ip; uses no stack; call
 * with interrupts disabled (see file header).
 */
ASENTRY_NP(dcache_inv_poc_all)
#if __ARM_ARCH == 6
	mcr	CP15_DCIALL		/* ARMv6: one op invalidates whole D-cache */
	DSB
	bx	lr
#else
	mrc	CP15_CLIDR(r0)		/* read Cache Level ID register */
	ands	r0, r0, #0x07000000	/* isolate LoC field, CLIDR[26:24] */
	mov	r0, r0, lsr #23		/* Get LoC 'naturally' aligned for */
	beq	4f			/* use in the CSSELR register below */

	/* Level loop: on entry r0 = 2 * level-above-current (CSSELR format). */
1:	sub	r0, #2			/* select next innermost cache level */
	mcr	CP15_CSSELR(r0)		/* set cache level */
	isb				/* sync CSSELR write before CCSIDR read */
	mrc	CP15_CCSIDR(r0)		/* read CCSIDR for selected level */

	ubfx	r2, r0, #13, #15	/* get num sets - 1 from CCSIDR */
	ubfx	r3, r0, #3, #10		/* get num ways - 1 from CCSIDR */
	clz	r1, r3			/* number of bits to MSB of way */
	lsl	r3, r3, r1		/* shift way field into top bits */
	mov	ip, #1
	lsl	ip, ip, r1		/* ip now contains the way decr */

	ubfx	r0, r0, #0, #3		/* get linesize from CCSIDR */
	add	r0, r0, #4		/* apply bias: log2(linesize) = field + 4 */
	lsl	r2, r2, r0		/* shift sets by log2(linesize) */
	add	r3, r3, r2		/* merge numsets - 1 with numways - 1 */
	sub	ip, ip, r2		/* subtract numsets - 1 from way decr */
	mov	r1, #1
	lsl	r1, r1, r0		/* r1 now contains the set decr */
	mov	r2, ip			/* r2 now contains set way decr */

	/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
2:	mcr	CP15_DCISW(r3)		/* invalidate line by set/way */
	movs	r0, r3			/* get current way/set */
	beq	3f			/* at 0 means we are done */
	movs	r0, r0, lsl #10		/* clear way bits leaving only set bits*/
	subne	r3, r3, r1		/* non-zero?, decrement set */
	subeq	r3, r3, r2		/* zero?, decrement way and restore set count */
	b	2b

3:
	mrc	CP15_CSSELR(r0)		/* get cache level just finished */
	teq	r0, #0			/* level 0 (innermost) done? */
	bne	1b

4:	dsb				/* wait for stores to finish */
	mov	r0, #0
	mcr	CP15_CSSELR(r0)		/* restore CSSELR to level 0 */
	isb				/* synchronize the CSSELR write */
	bx	lr
#endif /* __ARM_ARCH == 6 */
END(dcache_inv_poc_all)
/*
 * void dcache_inv_pou_all(void)
 *
 * Invalidate (no write-back) the D-cache to the Point of Unification,
 * i.e. only the levels below LoUU from CLIDR (typically L1 only), walking
 * levels outermost to innermost by set/way. Clobbers only r0-r3 and ip;
 * uses no stack; call with interrupts disabled (see file header).
 *
 * Fix: add the missing 'isb' after the final CSSELR restore, matching
 * dcache_inv_poc_all; a CSSELR write requires context synchronization
 * before any subsequent dependent CCSIDR access.
 */
ASENTRY_NP(dcache_inv_pou_all)
#if __ARM_ARCH == 6
	mcr	CP15_DCIALL		/* ARMv6: one op invalidates whole D-cache */
	DSB
	bx	lr
#else
	mrc	CP15_CLIDR(r0)		/* read Cache Level ID register */
	ands	r0, r0, #0x38000000	/* isolate LoUU field, CLIDR[29:27] */
	mov	r0, r0, lsr #26		/* Get LoUU (naturally aligned) */
	beq	4f			/* no levels below PoU: barrier only */

	/* Level loop: on entry r0 = 2 * level-above-current (CSSELR format). */
1:	sub	r0, #2			/* select next innermost cache level */
	mcr	CP15_CSSELR(r0)		/* set cache level */
	isb				/* sync CSSELR write before CCSIDR read */
	mrc	CP15_CCSIDR(r0)		/* read CCSIDR for selected level */

	ubfx	r2, r0, #13, #15	/* get num sets - 1 from CCSIDR */
	ubfx	r3, r0, #3, #10		/* get num ways - 1 from CCSIDR */
	clz	r1, r3			/* number of bits to MSB of way */
	lsl	r3, r3, r1		/* shift way field into top bits */
	mov	ip, #1
	lsl	ip, ip, r1		/* ip now contains the way decr */

	ubfx	r0, r0, #0, #3		/* get linesize from CCSIDR */
	add	r0, r0, #4		/* apply bias: log2(linesize) = field + 4 */
	lsl	r2, r2, r0		/* shift sets by log2(linesize) */
	add	r3, r3, r2		/* merge numsets - 1 with numways - 1 */
	sub	ip, ip, r2		/* subtract numsets - 1 from way decr */
	mov	r1, #1
	lsl	r1, r1, r0		/* r1 now contains the set decr */
	mov	r2, ip			/* r2 now contains set way decr */

	/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
2:	mcr	CP15_DCISW(r3)		/* invalidate line by set/way */
	movs	r0, r3			/* get current way/set */
	beq	3f			/* at 0 means we are done */
	movs	r0, r0, lsl #10		/* clear way bits leaving only set bits*/
	subne	r3, r3, r1		/* non-zero?, decrement set */
	subeq	r3, r3, r2		/* zero?, decrement way and restore set count */
	b	2b

3:
	mrc	CP15_CSSELR(r0)		/* get cache level just finished */
	teq	r0, #0			/* level 0 (innermost) done? */
	bne	1b

4:	dsb				/* wait for stores to finish */
	mov	r0, #0
	mcr	CP15_CSSELR(r0)		/* restore CSSELR to level 0 */
	isb				/* synchronize the CSSELR write
					 * (was missing; matches
					 * dcache_inv_poc_all) */
	bx	lr
#endif
END(dcache_inv_pou_all)
/*
 * void dcache_wbinv_poc_all(void)
 *
 * Write back (clean) and invalidate the entire D-cache to the Point of
 * Coherency. Unlike the invalidate-only routines, this cleans from the
 * innermost level (0) outward to LoC so dirty lines propagate toward
 * memory. Clobbers only r0-r3 and ip; uses no stack; call with
 * interrupts disabled (see file header).
 *
 * Fix: add the missing 'isb' after the final CSSELR restore, matching
 * dcache_inv_poc_all; a CSSELR write requires context synchronization
 * before any subsequent dependent CCSIDR access.
 */
ASENTRY_NP(dcache_wbinv_poc_all)
#if __ARM_ARCH == 6
	mcr	CP15_DCCIALL		/* ARMv6: one op cleans+invalidates all */
	DSB
	bx	lr
#else
	mrc	CP15_CLIDR(r0)		/* read Cache Level ID register */
	ands	r0, r0, #0x07000000	/* isolate LoC field, CLIDR[26:24] */
	beq	4f			/* LoC == 0: nothing to clean */
	mov	r0, #0			/* Clean from inner to outer levels */

	/* Level loop: r0 = 2 * current level (CSSELR format). */
1:	mcr	CP15_CSSELR(r0)		/* set cache level */
	isb				/* sync CSSELR write before CCSIDR read */
	mrc	CP15_CCSIDR(r0)		/* read CCSIDR for selected level */

	ubfx	r2, r0, #13, #15	/* get num sets - 1 from CCSIDR */
	ubfx	r3, r0, #3, #10		/* get num ways - 1 from CCSIDR */
	clz	r1, r3			/* number of bits to MSB of way */
	lsl	r3, r3, r1		/* shift way field into top bits */
	mov	ip, #1
	lsl	ip, ip, r1		/* ip now contains the way decr */

	ubfx	r0, r0, #0, #3		/* get linesize from CCSIDR */
	add	r0, r0, #4		/* apply bias: log2(linesize) = field + 4 */
	lsl	r2, r2, r0		/* shift sets by log2(linesize) */
	add	r3, r3, r2		/* merge numsets - 1 with numways - 1 */
	sub	ip, ip, r2		/* subtract numsets - 1 from way decr */
	mov	r1, #1
	lsl	r1, r1, r0		/* r1 now contains the set decr */
	mov	r2, ip			/* r2 now contains set way decr */

	/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
2:	mcr	CP15_DCCISW(r3)		/* clean and invalidate line by set/way */
	movs	r0, r3			/* get current way/set */
	beq	3f			/* at 0 means we are done */
	movs	r0, r0, lsl #10		/* clear way bits leaving only set bits*/
	subne	r3, r3, r1		/* non-zero?, decrement set */
	subeq	r3, r3, r2		/* zero?, decrement way and restore set count */
	b	2b

3:
	mrc	CP15_CSSELR(r0)		/* get cache level just finished */
	add	r0, r0, #2		/* next level */
	mrc	CP15_CLIDR(r1)
	ands	r1, r1, #0x07000000	/* re-read LoC for loop bound */
	mov	r1, r1, lsr #23		/* Get LoC (naturally aligned) */
	cmp	r1, r0			/* reached LoC yet? */
	bne	1b

4:	dsb				/* wait for stores to finish */
	mov	r0, #0
	mcr	CP15_CSSELR(r0)		/* restore CSSELR to level 0 */
	isb				/* synchronize the CSSELR write
					 * (was missing; matches
					 * dcache_inv_poc_all) */
	bx	lr
#endif /* __ARM_ARCH == 6 */
END(dcache_wbinv_poc_all)