1 /*-
2 * Copyright (C) 2011 MARVELL INTERNATIONAL LTD.
3 * All rights reserved.
4 *
5 * Developed by Semihalf.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of MARVELL nor the names of contributors
16 * may be used to endorse or promote products derived from this software
17 * without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
20 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
22 * ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
23 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
25 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
26 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
27 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
28 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
29 * SUCH DAMAGE.
30 */
31
32 #include <machine/asm.h>
33 __FBSDID("$FreeBSD: releng/10.0/sys/arm/arm/cpufunc_asm_armv7.S 248361 2013-03-16 02:48:49Z andrew $");
34
	.cpu cortex-a8

	/*
	 * Literal pool: addresses of C-level cache-description variables
	 * (presumably filled in by MD startup code — confirm against
	 * cpufunc.c) and field masks used when decoding the per-level
	 * cache-type words in armv7_dcache_wbinv_all below.
	 */
.Lcoherency_level:
	.word	_C_LABEL(arm_cache_loc)		/* number of cache levels to maintain */
.Lcache_type:
	.word	_C_LABEL(arm_cache_type)	/* per-level cache-type (CCSIDR-format) words */
.Lway_mask:
	.word	0x3ff				/* associativity field: 10 bits */
.Lmax_index:
	.word	0x7fff				/* number-of-sets field: 15 bits */
.Lpage_mask:
	.word	0xfff				/* offset bits within a 4 KiB page */
47
/*
 * TTBR0 attribute bits (ARMv7): control how the hardware page-table
 * walker accesses the translation tables themselves.  These are ORed
 * into the table base address before it is written to TTBR0.
 */
#define PT_NOS          (1 << 5)		/* Not Outer Shareable */
#define PT_S            (1 << 1)		/* Shareable */
#define PT_INNER_NC	0			/* inner non-cacheable */
#define PT_INNER_WT	(1 << 0)		/* inner write-through */
#define PT_INNER_WB	((1 << 0) | (1 << 6))	/* inner write-back, no write-allocate */
#define PT_INNER_WBWA	(1 << 6)		/* inner write-back, write-allocate */
#define PT_OUTER_NC	0			/* outer non-cacheable */
#define PT_OUTER_WT	(2 << 3)		/* outer write-through */
#define PT_OUTER_WB	(3 << 3)		/* outer write-back, no write-allocate */
#define PT_OUTER_WBWA	(1 << 3)		/* outer write-back, write-allocate */

#ifdef SMP
/* SMP table walks must be marked shareable so all cores see updates. */
#define PT_ATTR	(PT_S|PT_INNER_WT|PT_OUTER_WT|PT_NOS)
#else
#define PT_ATTR	(PT_INNER_WT|PT_OUTER_WT)
#endif
64
/*
 * void armv7_setttb(u_int ttb)
 *
 * Install a new translation-table base: write back and invalidate the
 * I/D caches, program TTBR0 (merging the table-walk attributes from
 * PT_ATTR into the base address), then invalidate the TLBs.
 *
 * In:       r0 = physical address of the new L1 translation table
 * Clobbers: r0 (and whatever armv7_idcache_wbinv_all clobbers), flags
 */
ENTRY(armv7_setttb)
	stmdb	sp!, {r0, lr}			/* preserve ttb and return addr across call */
	bl	_C_LABEL(armv7_idcache_wbinv_all) /* clean the D cache */
	ldmia	sp!, {r0, lr}
	dsb					/* ensure maintenance completed before switch */

	orr	r0, r0, #PT_ATTR		/* merge page-table-walk attribute bits */
	mcr	p15, 0, r0, c2, c0, 0	/* Translation Table Base Register 0 (TTBR0) */
#ifdef SMP
	mcr	p15, 0, r0, c8, c3, 0	/* invalidate I+D TLBs Inner Shareable*/
#else
	mcr	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
#endif
	dsb					/* complete TLB invalidation ... */
	isb					/* ... and resynchronize the pipeline */
	RET
END(armv7_setttb)
82
/*
 * void armv7_tlb_flushID(void)
 *
 * Invalidate the entire unified (I+D) TLB and the branch predictor,
 * Inner Shareable scope on SMP, then synchronize.
 *
 * In:       nothing (the r0 value written to the cp15 registers is ignored)
 * Clobbers: flags only
 */
ENTRY(armv7_tlb_flushID)
	dsb				/* complete prior memory accesses first */
#ifdef SMP
	mcr	p15, 0, r0, c8, c3, 0	/* flush I+D tlb */
	mcr	p15, 0, r0, c7, c1, 6	/* flush BTB */
#else
	mcr	p15, 0, r0, c8, c7, 0	/* flush I+D tlb */
	mcr	p15, 0, r0, c7, c5, 6	/* flush BTB */
#endif
	dsb				/* complete the invalidations ... */
	isb				/* ... and resynchronize the pipeline */
	RET				/* use RET like every other routine here
					 * (was "mov pc, lr"; RET also handles
					 * interworking uniformly) */
END(armv7_tlb_flushID)
96
/*
 * void armv7_tlb_flushID_SE(u_int va)
 *
 * Invalidate the single TLB entry covering the page containing va
 * (MVA-based, Inner Shareable on SMP) and flush the branch predictor.
 *
 * In:       r0 = virtual address within the page to invalidate
 * Clobbers: r0, r1, flags
 */
ENTRY(armv7_tlb_flushID_SE)
	ldr	r1, .Lpage_mask
	bic	r0, r0, r1		/* round va down to its page base (MVA) */
#ifdef SMP
	mcr	p15, 0, r0, c8, c3, 1	/* flush D tlb single entry Inner Shareable*/
	mcr	p15, 0, r0, c7, c1, 6	/* flush BTB Inner Shareable */
#else
	mcr	p15, 0, r0, c8, c7, 1	/* flush D tlb single entry */
	mcr	p15, 0, r0, c7, c5, 6	/* flush BTB */
#endif
	dsb				/* complete the invalidations ... */
	isb				/* ... and resynchronize the pipeline */
	RET				/* use RET like every other routine here
					 * (was "mov pc, lr"; RET also handles
					 * interworking uniformly) */
END(armv7_tlb_flushID_SE)
111
/* Based on algorithm from ARM Architecture Reference Manual */
/*
 * void armv7_dcache_wbinv_all(void)
 *
 * Write back and invalidate the entire data cache by set/way (DCCISW),
 * walking every cache level recorded in arm_cache_loc, using the
 * per-level geometry words cached in arm_cache_type instead of
 * re-reading CLIDR/CCSIDR each time.
 *
 * Register roles inside the loops:
 *   r1 = cache-type word for current level   r2 = log2(line size)
 *   r3 = number of levels                    r4 = ways - 1
 *   r5 = bit position of the way field       r6 = composed set/way word
 *   r7 = set counter (sets-1 .. 0)           r8 = current level
 *   r9 = way counter (ways-1 .. 0)
 */
ENTRY(armv7_dcache_wbinv_all)
	stmdb	sp!, {r4, r5, r6, r7, r8, r9}	/* callee-saved per AAPCS */

	/* Get cache level */
	ldr	r0, .Lcoherency_level
	ldr	r3, [r0]			/* r3 = number of levels to clean */
	cmp	r3, #0
	beq	Finished			/* nothing recorded -> nothing to do */
	/* For each cache level */
	mov	r8, #0
Loop1:
	/* Get cache type for given level */
	mov	r2, r8, lsl #2
	add	r2, r2, r2			/* r2 = level * 8; presumably 8 bytes
						 * per level in arm_cache_type —
						 * TODO confirm element stride */
	ldr	r0, .Lcache_type
	ldr	r1, [r0, r2]			/* r1 = CCSIDR-format word for level */

	/* Get line size */
	and	r2, r1, #7
	add	r2, r2, #4			/* r2 = log2(line size in bytes) */

	/* Get number of ways */
	ldr	r4, .Lway_mask
	ands	r4, r4, r1, lsr #3		/* r4 = (ways - 1) */
	clz	r5, r4				/* way index is placed in the top bits */

	/* Get max index */
	ldr	r7, .Lmax_index
	ands	r7, r7, r1, lsr #13		/* r7 = (sets - 1) */
Loop2:
	mov	r9, r4				/* restart way counter for this set */
Loop3:
	/* Compose level | way | set into the set/way operand */
	mov	r6, r8, lsl #1			/* level field occupies bits [3:1] */
	orr	r6, r6, r9, lsl r5		/* way in the uppermost bits */
	orr	r6, r6, r7, lsl r2		/* set index above the line-offset bits */

	/* Clean and invalidate data cache by way/index */
	mcr	p15, 0, r6, c7, c14, 2
	subs	r9, r9, #1			/* next way */
	bge	Loop3
	subs	r7, r7, #1			/* next set */
	bge	Loop2
Skip:
	add	r8, r8, #1			/* next cache level */
	cmp	r3, r8
	bne	Loop1
Finished:
	dsb					/* complete all maintenance ops */
	ldmia	sp!, {r4, r5, r6, r7, r8, r9}
	RET
END(armv7_dcache_wbinv_all)
164
/*
 * void armv7_idcache_wbinv_all(void)
 *
 * Write back and invalidate the entire D cache, then invalidate the
 * entire I cache to the point of unification (Inner Shareable on SMP).
 *
 * Clobbers: whatever armv7_dcache_wbinv_all clobbers (r0-r3, flags;
 * r4-r9 are saved there).
 */
ENTRY(armv7_idcache_wbinv_all)
	stmdb	sp!, {lr}			/* bl below overwrites lr */
	bl	armv7_dcache_wbinv_all
#ifdef SMP
	mcr	p15, 0, r0, c7, c1, 0	/* Invalidate all I caches to PoU (ICIALLUIS) */
#else
	mcr	p15, 0, r0, c7, c5, 0	/* Invalidate all I caches to PoU (ICIALLU) */
#endif
	dsb					/* complete the invalidation ... */
	isb					/* ... and refetch instructions */
	ldmia	sp!, {lr}
	RET
END(armv7_idcache_wbinv_all)
178
/*
 * XXX Hard-coded D-cache line size: 32 bytes is correct for the
 * Marvell (MV) cores this was written for, but it should really be
 * read from the Cache Type Register instead.
 */
.Larmv7_line_size:
	.word	32			/* bytes per cache line */
184
/*
 * void armv7_dcache_wb_range(vm_offset_t va, vm_size_t len)
 *
 * Write back (clean, no invalidate) all D-cache lines covering
 * [va, va + len).
 *
 * In:       r0 = start VA, r1 = length in bytes
 * Clobbers: r0-r3, ip, flags
 */
ENTRY(armv7_dcache_wb_range)
	ldr	ip, .Larmv7_line_size	/* ip = line size in bytes */
	sub	r3, ip, #1		/* r3 = line-offset mask */
	and	r2, r0, r3		/* r2 = va's offset within its line */
	add	r1, r1, r2		/* grow len to cover the partial first line */
	bic	r0, r0, r3		/* align start down to a line boundary */
.Larmv7_wb_next:
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip		/* advance one line */
	subs	r1, r1, ip
	bhi	.Larmv7_wb_next		/* loop while bytes remain */
	dsb				/* data synchronization barrier */
	RET
END(armv7_dcache_wb_range)
199
/*
 * void armv7_dcache_wbinv_range(vm_offset_t va, vm_size_t len)
 *
 * Write back and invalidate all D-cache lines covering [va, va + len).
 *
 * In:       r0 = start VA, r1 = length in bytes
 * Clobbers: r0-r3, ip, flags
 */
ENTRY(armv7_dcache_wbinv_range)
	ldr	ip, .Larmv7_line_size	/* ip = line size in bytes */
	sub	r3, ip, #1		/* r3 = line-offset mask */
	and	r2, r0, r3		/* r2 = va's offset within its line */
	add	r1, r1, r2		/* grow len to cover the partial first line */
	bic	r0, r0, r3		/* align start down to a line boundary */
.Larmv7_wbinv_next:
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip		/* advance one line */
	subs	r1, r1, ip
	bhi	.Larmv7_wbinv_next	/* loop while bytes remain */
	dsb				/* data synchronization barrier */
	RET
END(armv7_dcache_wbinv_range)
214
/*
 * Note, we must not invalidate everything. If the range is too big we
 * must use wb-inv of the entire cache.
 */
/*
 * void armv7_dcache_inv_range(vm_offset_t va, vm_size_t len)
 *
 * Invalidate (discard, no write back) all D-cache lines covering
 * [va, va + len).
 *
 * NOTE(review): partial first/last lines are invalidated outright,
 * which discards any dirty data that shares those lines — presumably
 * callers guarantee line-aligned ranges; confirm before relying on
 * this for unaligned buffers.
 *
 * In:       r0 = start VA, r1 = length in bytes
 * Clobbers: r0-r3, ip, flags
 */
ENTRY(armv7_dcache_inv_range)
	ldr	ip, .Larmv7_line_size	/* ip = line size in bytes */
	sub	r3, ip, #1		/* r3 = line-offset mask */
	and	r2, r0, r3		/* r2 = va's offset within its line */
	add	r1, r1, r2		/* grow len to cover the partial first line */
	bic	r0, r0, r3		/* align start down to a line boundary */
.Larmv7_inv_next:
	mcr	p15, 0, r0, c7, c6, 1	/* Invalidate D cache SE with VA */
	add	r0, r0, ip		/* advance one line */
	subs	r1, r1, ip
	bhi	.Larmv7_inv_next	/* loop while bytes remain */
	dsb				/* data synchronization barrier */
	RET
END(armv7_dcache_inv_range)
233
/*
 * void armv7_idcache_wbinv_range(vm_offset_t va, vm_size_t len)
 *
 * For every line covering [va, va + len): invalidate the I-cache line
 * and write back + invalidate the D-cache line.
 *
 * NOTE(review): isb precedes dsb here, the opposite order from the
 * other routines in this file (which dsb before isb) — verify this is
 * intentional.
 *
 * In:       r0 = start VA, r1 = length in bytes
 * Clobbers: r0-r3, ip, flags
 */
ENTRY(armv7_idcache_wbinv_range)
	ldr	ip, .Larmv7_line_size	/* ip = line size in bytes */
	sub	r3, ip, #1		/* r3 = line-offset mask */
	and	r2, r0, r3		/* r2 = va's offset within its line */
	add	r1, r1, r2		/* grow len to cover the partial first line */
	bic	r0, r0, r3		/* align start down to a line boundary */
.Larmv7_id_wbinv_next:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip		/* advance one line */
	subs	r1, r1, ip
	bhi	.Larmv7_id_wbinv_next	/* loop while bytes remain */
	isb				/* instruction synchronization barrier */
	dsb				/* data synchronization barrier */
	RET
END(armv7_idcache_wbinv_range)
250
/*
 * void armv7_icache_sync_range(vm_offset_t va, vm_size_t len)
 *
 * Make instructions at [va, va + len) coherent: clean each D-cache
 * line to the point of unification and invalidate the corresponding
 * I-cache line.
 *
 * NOTE(review): unlike the other range routines above, this one does
 * not round va down to a line boundary or extend len for a partial
 * first line — presumably callers pass line-aligned ranges; confirm.
 *
 * In:       r0 = start VA, r1 = length in bytes
 * Clobbers: r0, r1, ip, flags
 */
ENTRY_NP(armv7_icache_sync_range)
	ldr	ip, .Larmv7_line_size	/* ip = line size in bytes */
.Larmv7_sync_next:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip		/* advance one line */
	subs	r1, r1, ip
	bhi	.Larmv7_sync_next	/* loop while bytes remain */
	isb				/* instruction synchronization barrier */
	dsb				/* data synchronization barrier */
	RET
END(armv7_icache_sync_range)
263
/*
 * void armv7_cpu_sleep(int unused)
 *
 * Idle the core: drain outstanding memory accesses, then stall in a
 * low-power state until an interrupt (or other wakeup event) arrives.
 */
ENTRY(armv7_cpu_sleep)
	dsb			/* settle all pending memory traffic first */
	wfi			/* doze until an interrupt wakes us */
	RET
END(armv7_cpu_sleep)
269
/*
 * void armv7_context_switch(u_int ttb)
 *
 * Switch address spaces: program TTBR0 with the new translation table
 * (merging the PT_ATTR walk attributes) and invalidate the TLBs.
 * Unlike armv7_setttb, the caches are not flushed here.
 *
 * In:       r0 = physical address of the new L1 translation table
 * Clobbers: r0, flags
 */
ENTRY(armv7_context_switch)
	dsb				/* complete prior accesses before the switch */
	orr	r0, r0, #PT_ATTR	/* merge page-table-walk attribute bits */

	mcr	p15, 0, r0, c2, c0, 0	/* set the new TTB */
#ifdef SMP
	mcr	p15, 0, r0, c8, c3, 0	/* and flush the I+D tlbs Inner Sharable */
#else
	mcr	p15, 0, r0, c8, c7, 0	/* and flush the I+D tlbs */
#endif
	dsb				/* complete TLB invalidation ... */
	isb				/* ... and resynchronize the pipeline */
	RET
END(armv7_context_switch)
284
/*
 * void armv7_drain_writebuf(void)
 *
 * Drain the write buffer: on ARMv7 this is simply a full data
 * synchronization barrier.
 */
ENTRY(armv7_drain_writebuf)
	dsb			/* wait for all buffered writes to complete */
	RET
END(armv7_drain_writebuf)
289
/*
 * void armv7_sev(void)
 *
 * Send an event to wake any cores sitting in WFE, making sure prior
 * stores are visible to them first.
 */
ENTRY(armv7_sev)
	dsb			/* publish prior stores before signalling */
	sev			/* signal event to all processors */
	nop
	RET
END(armv7_sev)
296
/*
 * u_int armv7_auxctrl(u_int clr, u_int chg)
 *
 * Read-modify-write the Auxiliary Control Register (ACTLR): clear the
 * bits in 'clr', toggle the bits in 'chg', and write the result back
 * only if it differs from the current value.
 *
 * In:       r0 = bits to clear, r1 = bits to toggle
 * Out:      r0 = previous ACTLR value
 * Clobbers: r2, r3, flags
 */
ENTRY(armv7_auxctrl)
	mrc	p15, 0, r3, c1, c0, 1	/* r3 = current ACTLR */
	bic	r2, r3, r0		/* drop the requested bits */
	eor	r2, r2, r1		/* flip the requested bits */

	teq	r3, r2			/* skip the write if nothing changed */
	mcrne	p15, 0, r2, c1, c0, 1
	mov	r0, r3			/* hand back the previous value */
	RET
END(armv7_auxctrl)
307
/* Cache object: d9efc1154ec1a2e4c6c35a41764cdadd */