1 /*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 1994, 1995, 1996, 1999 Ralf Baechle
7 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
8 * Copyright (C) 1999 Silicon Graphics, Inc.
9 */
10 #ifndef _ASM_STACKFRAME_H
11 #define _ASM_STACKFRAME_H
12
13 #include <linux/config.h>
14 #include <linux/threads.h>
15
16 #include <asm/asm.h>
17 #include <asm/offset.h>
18 #include <asm/processor.h>
19 #include <asm/addrspace.h>
20
21 #ifndef __ASSEMBLY__
22
/*
 * Two-level stringification: __str(x) first macro-expands x (e.g. a
 * numeric PT_* offset from <asm/offset.h>), then turns the result into
 * a string literal for pasting into inline-asm templates below.
 */
#define __str2(x) #x
#define __str(x) __str2(x)
25
/*
 * Save the callee-saved ("static") registers s0-s7 ($16-$23) and
 * fp/s8 ($30) into the struct pt_regs pointed to by @frame.  The
 * PT_R* field offsets are spliced into the asm strings with __str().
 *
 * NOTE(review): the asm stores to *frame but declares no "memory"
 * clobber; __volatile__ keeps it from being deleted, but ordering
 * against surrounding memory accesses is not guaranteed — confirm
 * callers do not depend on that.
 */
#define save_static(frame)                                \
	__asm__ __volatile__(                             \
		"sd\t$16,"__str(PT_R16)"(%0)\n\t"         \
		"sd\t$17,"__str(PT_R17)"(%0)\n\t"         \
		"sd\t$18,"__str(PT_R18)"(%0)\n\t"         \
		"sd\t$19,"__str(PT_R19)"(%0)\n\t"         \
		"sd\t$20,"__str(PT_R20)"(%0)\n\t"         \
		"sd\t$21,"__str(PT_R21)"(%0)\n\t"         \
		"sd\t$22,"__str(PT_R22)"(%0)\n\t"         \
		"sd\t$23,"__str(PT_R23)"(%0)\n\t"         \
		"sd\t$30,"__str(PT_R30)"(%0)\n\t"         \
		: /* No outputs */                        \
		: "r" (frame))
39
40 #endif /* !__ASSEMBLY__ */
41
42 #ifdef __ASSEMBLY__
43
/*
 * Save the assembler temporary $1 (at) into the frame.  Runs under
 * .set noat so the assembler cannot silently use $1 itself while we
 * are saving it.
 */
	.macro	SAVE_AT
	.set	push
	.set	noat
	sd	$1, PT_R1(sp)
	.set	pop
	.endm
50
/*
 * Save the temporary registers $8-$15 and $24 plus the HI/LO
 * multiply-divide result registers.  v1 ($3) is free as scratch here
 * (SAVE_SOME already saved it); the mfhi/mflo reads are interleaved
 * with unrelated stores to hide their result latency.
 */
	.macro	SAVE_TEMP
	mfhi	v1
	sd	$8, PT_R8(sp)
	sd	$9, PT_R9(sp)
	sd	v1, PT_HI(sp)
	mflo	v1
	sd	$10, PT_R10(sp)
	sd	$11, PT_R11(sp)
	sd	v1, PT_LO(sp)
	sd	$12, PT_R12(sp)
	sd	$13, PT_R13(sp)
	sd	$14, PT_R14(sp)
	sd	$15, PT_R15(sp)
	sd	$24, PT_R24(sp)
	.endm
66
/*
 * Save the callee-saved registers s0-s7 ($16-$23) and fp/s8 ($30).
 * Assembler counterpart of the C-level save_static() above.
 */
	.macro	SAVE_STATIC
	sd	$16, PT_R16(sp)
	sd	$17, PT_R17(sp)
	sd	$18, PT_R18(sp)
	sd	$19, PT_R19(sp)
	sd	$20, PT_R20(sp)
	sd	$21, PT_R21(sp)
	sd	$22, PT_R22(sp)
	sd	$23, PT_R23(sp)
	sd	$30, PT_R30(sp)
	.endm
78
79 #ifdef CONFIG_SMP
/*
 * Load this CPU's saved kernel stack pointer into k1 (SMP variation).
 * Clobbers k0 and k1; safe in exception context only.
 *
 * NOTE(review): this relies on CP0_CONTEXT having been initialized so
 * that (CONTEXT >> 23) == &pgd_current[cpu]; subtracting &pgd_current
 * then yields cpu * 8, the byte index into the kernelsp[] array.
 * Confirm against the per-CPU CONTEXT setup in the TLB init code.
 */
	.macro	get_saved_sp	/* SMP variation */
	dmfc0	k1, CP0_CONTEXT
	dsra	k1, 23			# k1 = &pgd_current[cpu] (see note)
	lui	k0, %hi(pgd_current)
	daddiu	k0, %lo(pgd_current)
	dsubu	k1, k0			# k1 = cpu * 8
	lui	k0, %hi(kernelsp)
	daddu	k1, k0
	ld	k1, %lo(kernelsp)(k1)	# k1 = kernelsp[cpu]
	.endm
90
/*
 * Record \stackp as this CPU's kernel stack pointer:
 *   kernelsp[TASK_PROCESSOR(gp)] = \stackp
 * gp must hold the current task pointer.  \temp and \temp2 are
 * caller-chosen scratch registers (both clobbered).
 */
	.macro	set_saved_sp	stackp temp temp2
	lw	\temp, TASK_PROCESSOR(gp)	# cpu this task runs on
	dsll	\temp, 3			# scale by 8 bytes per slot
	lui	\temp2, %hi(kernelsp)
	daddu	\temp, \temp2
	sd	\stackp, %lo(kernelsp)(\temp)
	.endm
98 #else
/*
 * Load the saved kernel stack pointer into k1 (uniprocessor: only
 * slot 0 of kernelsp is ever used).
 */
	.macro	get_saved_sp	/* Uniprocessor variation */
	lui	k1, %hi(kernelsp)
	ld	k1, %lo(kernelsp)(k1)
	.endm
103
/*
 * Record \stackp as the kernel stack pointer.  \temp and \temp2 are
 * accepted (to match the SMP variant's interface) but unused here.
 */
	.macro	set_saved_sp	stackp temp temp2
	sd	\stackp, kernelsp
	.endm
107 #endif
/*
 * Reserve the kernelsp array: one 8-byte kernel stack pointer slot
 * per CPU, 8-byte aligned.
 */
	.macro	declare_saved_sp
	.comm	kernelsp, NR_CPUS * 8, 8
	.endm
111
/*
 * Partial register save on exception entry: switch to the kernel
 * stack if we arrived from user mode, carve a struct pt_regs off it,
 * and save the state every handler needs ($0, v0/v1, a0-a3, t9, gp,
 * ra, old sp, plus STATUS/CAUSE/EPC).  Finally point gp at the
 * current task.  Clobbers k0, k1, v1.
 */
	.macro	SAVE_SOME
	.set	push
	.set	reorder
	mfc0	k0, CP0_STATUS
	sll	k0, 3			# move ST0_CU0 (bit 28) to the sign bit
	.set	noreorder
	bltz	k0, 8f			# CU0 set => already on kernel stack
	 move	k1, sp			# (delay slot) runs either way
	.set	reorder
	/* Called from user mode, new stack. */
	get_saved_sp			# k1 = this CPU's kernel stack
8:	move	k0, sp			# k0 = sp at exception time
	dsubu	sp, k1, PT_SIZE		# allocate the pt_regs frame
	sd	k0, PT_R29(sp)		# record the pre-exception sp
	sd	$3, PT_R3(sp)
	sd	$0, PT_R0(sp)
	mfc0	v1, CP0_STATUS		# cp0 reads interleaved with the
	sd	$2, PT_R2(sp)		# stores to hide their latency
	sd	v1, PT_STATUS(sp)
	sd	$4, PT_R4(sp)
	mfc0	v1, CP0_CAUSE
	sd	$5, PT_R5(sp)
	sd	v1, PT_CAUSE(sp)
	sd	$6, PT_R6(sp)
	dmfc0	v1, CP0_EPC
	sd	$7, PT_R7(sp)
	sd	v1, PT_EPC(sp)
	sd	$25, PT_R25(sp)
	sd	$28, PT_R28(sp)
	sd	$31, PT_R31(sp)
	ori	$28, sp, 0x3fff		# gp = sp rounded down to the 16 kB
	xori	$28, 0x3fff		# boundary, i.e. the current task
	.set	pop
	.endm
146
/* Save the complete register set (everything pt_regs holds). */
	.macro	SAVE_ALL
	SAVE_SOME
	SAVE_AT
	SAVE_TEMP
	SAVE_STATIC
	.endm
153
/*
 * Restore the assembler temporary $1 (at); .set noat keeps the
 * assembler from using $1 while we reload it.
 */
	.macro	RESTORE_AT
	.set	push
	.set	noat
	ld	$1, PT_R1(sp)
	.set	pop
	.endm
160
/*
 * Restore the pre-exception stack pointer.  Must be the very last
 * restore: every other RESTORE_* macro addresses the frame via sp.
 */
	.macro	RESTORE_SP
	ld	sp, PT_R29(sp)
	.endm
164
/*
 * Restore HI/LO and the temporary registers saved by SAVE_TEMP.
 * $24 serves as scratch for the mtlo/mthi transfers (interleaved
 * with other loads to hide latency) and is itself restored last.
 */
	.macro	RESTORE_TEMP
	ld	$24, PT_LO(sp)
	ld	$8, PT_R8(sp)
	ld	$9, PT_R9(sp)
	mtlo	$24
	ld	$24, PT_HI(sp)
	ld	$10, PT_R10(sp)
	ld	$11, PT_R11(sp)
	mthi	$24
	ld	$12, PT_R12(sp)
	ld	$13, PT_R13(sp)
	ld	$14, PT_R14(sp)
	ld	$15, PT_R15(sp)
	ld	$24, PT_R24(sp)			# restore the scratch reg itself
	.endm
180
/* Restore the callee-saved registers s0-s7 ($16-$23) and fp/s8 ($30). */
	.macro	RESTORE_STATIC
	ld	$16, PT_R16(sp)
	ld	$17, PT_R17(sp)
	ld	$18, PT_R18(sp)
	ld	$19, PT_R19(sp)
	ld	$20, PT_R20(sp)
	ld	$21, PT_R21(sp)
	ld	$22, PT_R22(sp)
	ld	$23, PT_R23(sp)
	ld	$30, PT_R30(sp)
	.endm
192
/*
 * Restore STATUS, EPC and the registers saved by SAVE_SOME.
 * STATUS is rebuilt from the saved value but keeps the CURRENT
 * interrupt-mask bits (IM0-IM7, mask 0xff00), so irq lines enabled or
 * disabled while in the kernel stay that way after the return.
 * Interrupts are first switched off (kernel mode, IE clear) so the
 * intermediate STATUS state is never observable.
 */
	.macro	RESTORE_SOME
	.set	push
	.set	reorder
	mfc0	t0, CP0_STATUS
	.set	pop
	ori	t0, 0x1f		# force KSU/ERL/EXL/IE to 1 ...
	xori	t0, 0x1f		# ... then clear them: irqs off
	mtc0	t0, CP0_STATUS
	li	v1, 0xff00		# IM0-IM7 interrupt mask bits
	and	t0, v1			# t0 = live IM bits
	ld	v0, PT_STATUS(sp)
	nor	v1, $0, v1		# v1 = ~0xff00
	and	v0, v1			# saved STATUS minus its IM bits
	or	v0, t0			# ... merged with the live IM bits
	mtc0	v0, CP0_STATUS
	ld	v1, PT_EPC(sp)
	dmtc0	v1, CP0_EPC		# return address for eret
	ld	$31, PT_R31(sp)
	ld	$28, PT_R28(sp)
	ld	$25, PT_R25(sp)
	ld	$7, PT_R7(sp)
	ld	$6, PT_R6(sp)
	ld	$5, PT_R5(sp)
	ld	$4, PT_R4(sp)
	ld	$3, PT_R3(sp)
	ld	$2, PT_R2(sp)
	.endm
220
/*
 * Restore the complete register set.  RESTORE_SP must come last:
 * all other restores address the frame relative to sp.
 */
	.macro	RESTORE_ALL
	RESTORE_SOME
	RESTORE_AT
	RESTORE_TEMP
	RESTORE_STATIC
	RESTORE_SP
	.endm
228
229 /*
230 * Move to kernel mode and disable interrupts.
231 * Set cp0 enable bit as sign that we're running on the kernel stack
232 */
	.macro	CLI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0|0x1f	# CU0 plus the KSU/ERL/EXL/IE field
	or	t0, t1			# set CU0, force low 5 bits to 1 ...
	xori	t0, 0x1f		# ... then clear all 5: kernel mode,
	mtc0	t0, CP0_STATUS		# exception level clear, irqs off
	.endm
240
241 /*
242 * Move to kernel mode and enable interrupts.
243 * Set cp0 enable bit as sign that we're running on the kernel stack
244 */
	.macro	STI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | 0x1f	# CU0 plus the KSU/ERL/EXL/IE field
	or	t0, t1			# set CU0, force low 5 bits to 1 ...
	xori	t0, 0x1e		# ... clear all but IE (bit 0): kernel
	mtc0	t0, CP0_STATUS		# mode with interrupts enabled
	.endm
252
253 /*
254 * Just move to kernel mode and leave interrupts as they are.
255 * Set cp0 enable bit as sign that we're running on the kernel stack
256 */
	.macro	KMODE
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | 0x1e	# CU0 plus KSU/ERL/EXL (NOT IE)
	or	t0, t1			# set CU0, force bits 1-4 to 1 ...
	xori	t0, 0x1e		# ... then clear them; IE (bit 0) is
	mtc0	t0, CP0_STATUS		# left exactly as it was
	.endm
264
265 #endif /* __ASSEMBLY__ */
266
267 #endif /* _ASM_STACKFRAME_H */
Cache object: 542b38c4b666f7201ddb2485aadd9cdb
|