1 /* $NetBSD: cpufunc_asm_arm8.S,v 1.2 2001/11/11 00:47:49 thorpej Exp $ */
2
3 /*-
4 * Copyright (c) 1997 ARM Limited
5 * Copyright (c) 1997 Causality Limited
6 * All rights reserved.
7 *
8 * Redistribution and use in source and binary forms, with or without
9 * modification, are permitted provided that the following conditions
10 * are met:
11 * 1. Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * 2. Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in the
15 * documentation and/or other materials provided with the distribution.
16 * 3. All advertising materials mentioning features or use of this software
17 * must display the following acknowledgement:
18 * This product includes software developed by Causality Limited.
19 * 4. The name of Causality Limited may not be used to endorse or promote
20 * products derived from this software without specific prior written
21 * permission.
22 *
23 * THIS SOFTWARE IS PROVIDED BY CAUSALITY LIMITED ``AS IS'' AND ANY EXPRESS
24 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
25 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26 * DISCLAIMED. IN NO EVENT SHALL CAUSALITY LIMITED BE LIABLE FOR ANY DIRECT,
27 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
28 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
29 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
30 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
31 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
32 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
33 * SUCH DAMAGE.
34 *
35 * ARM8 assembly functions for CPU / MMU / TLB specific operations
36 *
37 */
38
39 #include <machine/asm.h>
40 __FBSDID("$FreeBSD: releng/10.0/sys/arm/arm/cpufunc_asm_arm8.S 248361 2013-03-16 02:48:49Z andrew $");
41
/*
 * u_int arm8_clock_config(u_int bic, u_int eor)
 *
 * Read-modify-write the ARM8 clock/test register (CP15 register 15):
 *	new = ((old & ~bic) ^ eor) with the L bit always cleared
 * Returns the previous register value.
 *
 * Dynamic clocking (bit 0) is forced off for the duration of the
 * update and only (possibly) re-enabled by the final write, with NOP
 * padding in between.  NOTE(review): presumably a hardware-required
 * sequence for safely switching clock modes on ARM810 — confirm
 * against the ARM810 data sheet.
 */
ENTRY(arm8_clock_config)
	mrc	p15, 0, r3, c15, c0, 0	/* r3 = old clock register value */
	bic	r2, r3, #0x11		/* turn off dynamic clocking (bit 0)
					   and clear L bit (bit 4) */
	mcr	p15, 0, r2, c15, c0, 0	/* Write clock register */

	bic	r2, r3, r0		/* Clear the caller-requested bits */
	eor	r2, r2, r1		/* XOR in the caller-requested bits */
	bic	r2, r2, #0x10		/* clear the L bit */

	bic	r1, r2, #0x01		/* still keep dynamic clocking off */
	mcr	p15, 0, r1, c15, c0, 0	/* Write clock register */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	mcr	p15, 0, r2, c15, c0, 0	/* Write clock register (final value) */
	mov	r0, r3			/* Return old value */
	RET
END(arm8_clock_config)
62
63 /*
64 * Functions to set the MMU Translation Table Base register
65 *
66 * We need to clean and flush the cache as it uses virtual
67 * addresses that are about to change.
68 */
/*
 * void arm8_setttb(u_int ttb)
 *
 * Install a new translation table base (r0).  IRQ and FIQ are masked
 * for the whole sequence: the virtually-indexed cache is first cleaned
 * (written back) and flushed, then the TTB is written and the TLB
 * flushed, so no interrupt can re-dirty the cache with translations
 * that are about to become stale.
 */
ENTRY(arm8_setttb)
	mrs	r3, cpsr_all		/* save current interrupt state */
	orr	r1, r3, #(I32_bit | F32_bit)
	msr	cpsr_all, r1		/* mask IRQ and FIQ */

	stmfd	sp!, {r0-r3, lr}	/* cleanID clobbers r0/r2; save all */
	bl	_C_LABEL(arm8_cache_cleanID)
	ldmfd	sp!, {r0-r3, lr}
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */

	/* Write the TTB */
	mcr	p15, 0, r0, c2, c0, 0

	/* If we have updated the TTB we must flush the TLB */
	mcr	p15, 0, r0, c8, c7, 0

	/* For good measure we will flush the IDC as well */
	mcr	p15, 0, r0, c7, c7, 0

	/* Make sure that pipeline is emptied */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	msr	cpsr_all, r3		/* restore interrupt state */

	RET
END(arm8_setttb)
95
96 /*
97 * TLB functions
98 */
/*
 * void arm8_tlb_flushID(void)
 *
 * Invalidate the entire unified (I+D) TLB.  The value in r0 is
 * ignored by this CP15 c8 operation; any register may be passed.
 */
ENTRY(arm8_tlb_flushID)
	mcr	p15, 0, r0, c8, c7, 0	/* flush I+D tlb */
	RET
END(arm8_tlb_flushID)
103
/*
 * void arm8_tlb_flushID_SE(u_int va)
 *
 * Invalidate the single unified TLB entry matching the virtual
 * address passed in r0.
 */
ENTRY(arm8_tlb_flushID_SE)
	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb single entry */
	RET
END(arm8_tlb_flushID_SE)
108
109 /*
110 * Cache functions
111 */
/*
 * void arm8_cache_flushID(void)
 *
 * Invalidate (flush) the entire unified cache WITHOUT writing dirty
 * lines back — use a clean/purge variant if write-back is required.
 * The value in r0 is ignored by this CP15 c7 operation.
 */
ENTRY(arm8_cache_flushID)
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */
	RET
END(arm8_cache_flushID)
116
/*
 * void arm8_cache_flushID_E(u_int entry)
 *
 * Invalidate a single unified-cache entry (selected by r0) without
 * writing it back.
 */
ENTRY(arm8_cache_flushID_E)
	mcr	p15, 0, r0, c7, c7, 1	/* flush I+D single entry */
	RET
END(arm8_cache_flushID_E)
121
/*
 * void arm8_cache_cleanID(void)
 *
 * Clean (write back, but do not invalidate) the entire unified cache
 * by iterating the clean-entry operation over every cache index.
 *
 * Loop structure: the inner unrolled body issues 16 clean-entry ops at
 * r0 + 0x00 .. r0 + 0xF0 (steps of 0x10); the outer loop then advances
 * r0 by 0x04000000 and repeats until the 32-bit add wraps back to zero
 * (64 iterations), at which point "adds" sets Z and the loop exits.
 * NOTE(review): the bit positions presumably match the ARM810
 * clean-by-index encoding (entry in bits [7:4], segment in the high
 * bits) — confirm against the ARM810 data sheet.
 *
 * Clobbers r0 and r2; callers (e.g. arm8_setttb) save registers
 * around the call.
 */
ENTRY(arm8_cache_cleanID)
	mov	r0, #0x00000000		/* start at index 0 */

1:	mov	r2, r0			/* r2 = current segment base */
	mcr	p15, 0, r2, c7, c11, 1	/* clean entry (write back line) */
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1

	adds	r0, r0, #0x04000000	/* next segment; Z set on wrap to 0 */
	bne	1b			/* loop until all segments done */

	RET
END(arm8_cache_cleanID)
163
/*
 * void arm8_cache_cleanID_E(u_int entry)
 *
 * Clean (write back, without invalidating) a single unified-cache
 * entry selected by r0.
 */
ENTRY(arm8_cache_cleanID_E)
	mcr	p15, 0, r0, c7, c11, 1	/* clean I+D single entry */
	RET
END(arm8_cache_cleanID_E)
168
/*
 * void arm8_cache_purgeID(void)
 *
 * Purge (clean then invalidate) the entire unified cache.  Same index
 * walk as arm8_cache_cleanID — 16 entries per segment at steps of
 * 0x10, outer loop stepping by 0x04000000 until the 32-bit add wraps
 * to zero — but each entry is cleaned and then separately flushed (see
 * bug note below).  IRQ/FIQ are masked so no interrupt handler can
 * re-dirty a line between its clean and its flush.
 *
 * Clobbers r0 and r2; preserves the caller's interrupt state via r3.
 */
ENTRY(arm8_cache_purgeID)
	/*
	 * ARM810 bug 3
	 *
	 * Clean and invalidate entry will not invalidate the entry
	 * if the line was already clean. (mcr p15, 0, rd, c7, 15, 1)
	 *
	 * Instead of using the clean and invalidate entry operation
	 * use a separate clean and invalidate entry operations.
	 * i.e.
	 * mcr p15, 0, rd, c7, c11, 1
	 * mcr p15, 0, rd, c7, c7, 1
	 */

	mov	r0, #0x00000000		/* start at index 0 */

	mrs	r3, cpsr_all		/* save current interrupt state */
	orr	r2, r3, #(I32_bit | F32_bit)
	msr	cpsr_all, r2		/* mask IRQ and FIQ */

1:	mov	r2, r0			/* r2 = current segment base */
	mcr	p15, 0, r2, c7, c11, 1	/* clean entry ... */
	mcr	p15, 0, r2, c7, c7, 1	/* ... then flush it (bug 3) */
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1

	adds	r0, r0, #0x04000000	/* next segment; Z set on wrap to 0 */
	bne	1b			/* loop until all segments done */

	msr	cpsr_all, r3		/* restore interrupt state */
	RET
END(arm8_cache_purgeID)
244
/*
 * void arm8_cache_purgeID_E(u_int entry)
 *
 * Purge (clean then invalidate) a single unified-cache entry selected
 * by r0.  IRQ/FIQ are masked so an interrupt handler cannot re-dirty
 * the line between the clean and the flush.
 */
ENTRY(arm8_cache_purgeID_E)
	/*
	 * ARM810 bug 3
	 *
	 * Clean and invalidate entry will not invalidate the entry
	 * if the line was already clean. (mcr p15, 0, rd, c7, 15, 1)
	 *
	 * Instead of using the clean and invalidate entry operation
	 * use a separate clean and invalidate entry operations.
	 * i.e.
	 * mcr p15, 0, rd, c7, c11, 1
	 * mcr p15, 0, rd, c7, c7, 1
	 */
	mrs	r3, cpsr_all		/* save current interrupt state */
	orr	r2, r3, #(I32_bit | F32_bit)
	msr	cpsr_all, r2		/* mask IRQ and FIQ */
	mcr	p15, 0, r0, c7, c11, 1	/* clean I+D single entry */
	mcr	p15, 0, r0, c7, c7, 1	/* flush I+D single entry */
	msr	cpsr_all, r3		/* restore interrupt state */
	RET
END(arm8_cache_purgeID_E)
266
267 /*
268 * Context switch.
269 *
270 * These is the CPU-specific parts of the context switcher cpu_switch()
271 * These functions actually perform the TTB reload.
272 *
273 * NOTE: Special calling convention
274 * r1, r4-r13 must be preserved
275 */
/*
 * void arm8_context_switch(u_int ttb)
 *
 * CPU-specific tail of cpu_switch(): flush the cache, install the new
 * process's translation table base (r0), and flush the TLB.
 *
 * Special calling convention (see comment above): r1 and r4-r13 must
 * be preserved — this routine touches only r0 and the CP15 registers.
 * NOTE(review): no cache clean here — presumably the caller has
 * already written back any dirty lines it cares about; confirm
 * against cpu_switch().
 */
ENTRY(arm8_context_switch)
	/* For good measure we will flush the IDC as well */
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */

	/* Write the TTB */
	mcr	p15, 0, r0, c2, c0, 0

	/* If we have updated the TTB we must flush the TLB */
	mcr	p15, 0, r0, c8, c7, 0	/* flush the I+D tlb */

#if 0
	/* For good measure we will flush the IDC as well */
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */
#endif

	/* Make sure that pipeline is emptied */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	RET
END(arm8_context_switch)
296
/* Cache object: 2c68e013ad3ff958616e3e5ae679179e */