include/asm-mips/stackframe.h

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/config.h>
#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>

        .macro  SAVE_AT
        .set    push
        .set    noat
        LONG_S  $1, PT_R1(sp)
        .set    pop
        .endm
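
/*
 * $1 is the assembler temporary ($at); the .set noat above keeps gas from
 * warning about, or silently clobbering, a register we are saving and
 * restoring by hand.
 */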

        .macro  SAVE_TEMP
        mfhi    v1
#ifdef CONFIG_32BIT
        LONG_S  $8, PT_R8(sp)
        LONG_S  $9, PT_R9(sp)
#endif
        LONG_S  v1, PT_HI(sp)
        mflo    v1
        LONG_S  $10, PT_R10(sp)
        LONG_S  $11, PT_R11(sp)
        LONG_S  v1, PT_LO(sp)
        LONG_S  $12, PT_R12(sp)
        LONG_S  $13, PT_R13(sp)
        LONG_S  $14, PT_R14(sp)
        LONG_S  $15, PT_R15(sp)
        LONG_S  $24, PT_R24(sp)
        .endm
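
/*
 * Note on the CONFIG_32BIT block above: under the 32-bit ABI $8 and $9 are
 * the t0/t1 temporaries and get saved here, while under the 64-bit ABIs
 * they are the a4/a5 argument registers and are saved in SAVE_SOME along
 * with the other argument registers instead.
 */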

        .macro  SAVE_STATIC
        LONG_S  $16, PT_R16(sp)
        LONG_S  $17, PT_R17(sp)
        LONG_S  $18, PT_R18(sp)
        LONG_S  $19, PT_R19(sp)
        LONG_S  $20, PT_R20(sp)
        LONG_S  $21, PT_R21(sp)
        LONG_S  $22, PT_R22(sp)
        LONG_S  $23, PT_R23(sp)
        LONG_S  $30, PT_R30(sp)
        .endm
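
/*
 * $16-$23 (s0-s7) and $30 (s8/fp) are callee-saved under the MIPS ABIs, so
 * C code reached from the exception path preserves them anyway; SAVE_STATIC
 * is used where the complete register file has to land in struct pt_regs
 * (see SAVE_ALL below).
 */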

#ifdef CONFIG_SMP
        .macro  get_saved_sp    /* SMP variation */
#ifdef CONFIG_32BIT
        mfc0    k0, CP0_CONTEXT
        lui     k1, %hi(kernelsp)
        srl     k0, k0, 23
        sll     k0, k0, 2
        addu    k1, k0
        LONG_L  k1, %lo(kernelsp)(k1)
#endif
#if defined(CONFIG_64BIT) && !defined(CONFIG_BUILD_ELF64)
        MFC0    k1, CP0_CONTEXT
        dsra    k1, 23
        lui     k0, %hi(pgd_current)
        addiu   k0, %lo(pgd_current)
        dsubu   k1, k0
        lui     k0, %hi(kernelsp)
        daddu   k1, k0
        LONG_L  k1, %lo(kernelsp)(k1)
#endif
#if defined(CONFIG_64BIT) && defined(CONFIG_BUILD_ELF64)
        MFC0    k1, CP0_CONTEXT
        dsrl    k1, 23
        dsll    k1, k1, 3
        lui     k0, %highest(kernelsp)
        daddiu  k0, %higher(kernelsp)
        dsll    k0, k0, 16
        daddiu  k0, %hi(kernelsp)
        daddu   k1, k1, k0
        LONG_L  k1, %lo(kernelsp)(k1)
#endif
        .endm

        .macro  set_saved_sp stackp temp temp2
#ifdef CONFIG_32BIT
        mfc0    \temp, CP0_CONTEXT
        srl     \temp, 23
        sll     \temp, 2
        LONG_S  \stackp, kernelsp(\temp)
#endif
#if defined(CONFIG_64BIT) && !defined(CONFIG_BUILD_ELF64)
        lw      \temp, TI_CPU(gp)
        dsll    \temp, 3
        lui     \temp2, %hi(kernelsp)
        daddu   \temp, \temp2
        LONG_S  \stackp, %lo(kernelsp)(\temp)
#endif
#if defined(CONFIG_64BIT) && defined(CONFIG_BUILD_ELF64)
        lw      \temp, TI_CPU(gp)
        dsll    \temp, 3
        LONG_S  \stackp, kernelsp(\temp)
#endif
        .endm
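
/*
 * Both SMP macros index the per-CPU kernelsp[] array.  get_saved_sp runs on
 * exception entry with only k0/k1 to play with, so it recovers this CPU's
 * slot from the upper bits of c0_context, which the per-CPU setup code keeps
 * loaded with a value that the shift by 23 turns into the right index or
 * byte offset (the exact encoding differs between the three builds above).
 * set_saved_sp can rely on gp pointing at thread_info and indexes by its
 * cpu field (TI_CPU) instead.  Roughly, as a sketch:
 *
 *      get_saved_sp:   k1 = kernelsp[this_cpu]
 *      set_saved_sp:   kernelsp[current_thread_info()->cpu] = \stackp
 */
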
#else
        .macro  get_saved_sp    /* Uniprocessor variation */
        lui     k1, %hi(kernelsp)
        LONG_L  k1, %lo(kernelsp)(k1)
        .endm

        .macro  set_saved_sp stackp temp temp2
        LONG_S  \stackp, kernelsp
        .endm
#endif

        .macro  SAVE_SOME
        .set    push
        .set    noat
        .set    reorder
        mfc0    k0, CP0_STATUS
        sll     k0, 3           /* extract cu0 bit */
        .set    noreorder
        bltz    k0, 8f
         move   k1, sp
        .set    reorder
        /* Called from user mode, new stack. */
        get_saved_sp
8:      move    k0, sp
        PTR_SUBU sp, k1, PT_SIZE
        LONG_S  k0, PT_R29(sp)
        LONG_S  $3, PT_R3(sp)
        LONG_S  $0, PT_R0(sp)
        mfc0    v1, CP0_STATUS
        LONG_S  $2, PT_R2(sp)
        LONG_S  v1, PT_STATUS(sp)
        LONG_S  $4, PT_R4(sp)
        mfc0    v1, CP0_CAUSE
        LONG_S  $5, PT_R5(sp)
        LONG_S  v1, PT_CAUSE(sp)
        LONG_S  $6, PT_R6(sp)
        MFC0    v1, CP0_EPC
        LONG_S  $7, PT_R7(sp)
#ifdef CONFIG_64BIT
        LONG_S  $8, PT_R8(sp)
        LONG_S  $9, PT_R9(sp)
#endif
        LONG_S  v1, PT_EPC(sp)
        LONG_S  $25, PT_R25(sp)
        LONG_S  $28, PT_R28(sp)
        LONG_S  $31, PT_R31(sp)
        ori     $28, sp, _THREAD_MASK
        xori    $28, _THREAD_MASK
        .set    pop
        .endm
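
/*
 * SAVE_SOME picks the stack to use from the CU0 bit of c0_status: the sll
 * by 3 moves ST0_CU0 (bit 28) into the sign bit, so the bltz is taken when
 * CU0 is already set, i.e. the exception came from kernel code that is
 * already on its kernel stack, and the delay slot keeps the current sp in
 * k1.  Otherwise k1 is fetched from kernelsp.  The final ori/xori pair
 * rounds sp down to the THREAD_SIZE boundary so that $28 (gp) ends up
 * pointing at the current thread_info.
 */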

        .macro  SAVE_ALL
        SAVE_SOME
        SAVE_AT
        SAVE_TEMP
        SAVE_STATIC
        .endm

        .macro  RESTORE_AT
        .set    push
        .set    noat
        LONG_L  $1, PT_R1(sp)
        .set    pop
        .endm

        .macro  RESTORE_TEMP
        LONG_L  $24, PT_LO(sp)
#ifdef CONFIG_32BIT
        LONG_L  $8, PT_R8(sp)
        LONG_L  $9, PT_R9(sp)
#endif
        mtlo    $24
        LONG_L  $24, PT_HI(sp)
        LONG_L  $10, PT_R10(sp)
        LONG_L  $11, PT_R11(sp)
        mthi    $24
        LONG_L  $12, PT_R12(sp)
        LONG_L  $13, PT_R13(sp)
        LONG_L  $14, PT_R14(sp)
        LONG_L  $15, PT_R15(sp)
        LONG_L  $24, PT_R24(sp)
        .endm

        .macro  RESTORE_STATIC
        LONG_L  $16, PT_R16(sp)
        LONG_L  $17, PT_R17(sp)
        LONG_L  $18, PT_R18(sp)
        LONG_L  $19, PT_R19(sp)
        LONG_L  $20, PT_R20(sp)
        LONG_L  $21, PT_R21(sp)
        LONG_L  $22, PT_R22(sp)
        LONG_L  $23, PT_R23(sp)
        LONG_L  $30, PT_R30(sp)
        .endm

#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

        .macro  RESTORE_SOME
        .set    push
        .set    reorder
        .set    noat
        mfc0    a0, CP0_STATUS
        ori     a0, 0x1f
        xori    a0, 0x1f
        mtc0    a0, CP0_STATUS
        li      v1, 0xff00
        and     a0, v1
        LONG_L  v0, PT_STATUS(sp)
        nor     v1, $0, v1
        and     v0, v1
        or      v0, a0
        mtc0    v0, CP0_STATUS
        LONG_L  $31, PT_R31(sp)
        LONG_L  $28, PT_R28(sp)
        LONG_L  $25, PT_R25(sp)
#ifdef CONFIG_64BIT
        LONG_L  $8, PT_R8(sp)
        LONG_L  $9, PT_R9(sp)
#endif
        LONG_L  $7, PT_R7(sp)
        LONG_L  $6, PT_R6(sp)
        LONG_L  $5, PT_R5(sp)
        LONG_L  $4, PT_R4(sp)
        LONG_L  $3, PT_R3(sp)
        LONG_L  $2, PT_R2(sp)
        .set    pop
        .endm
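
/*
 * The Status handling above (and in the non-R3000 variant below) first
 * drops back to kernel mode with interrupts off, then reinstalls the saved
 * Status word, except that its interrupt-mask field (the 0xff00 IM bits)
 * is replaced with the mask currently in the live Status register, so
 * interrupt-mask changes made while handling the exception survive the
 * return.
 */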

        .macro  RESTORE_SP_AND_RET
        .set    push
        .set    noreorder
        LONG_L  k0, PT_EPC(sp)
        LONG_L  sp, PT_R29(sp)
        jr      k0
         rfe
        .set    pop
        .endm

#else

        .macro  RESTORE_SOME
        .set    push
        .set    reorder
        .set    noat
        mfc0    a0, CP0_STATUS
        ori     a0, 0x1f
        xori    a0, 0x1f
        mtc0    a0, CP0_STATUS
        li      v1, 0xff00
        and     a0, v1
        LONG_L  v0, PT_STATUS(sp)
        nor     v1, $0, v1
        and     v0, v1
        or      v0, a0
        mtc0    v0, CP0_STATUS
        LONG_L  v1, PT_EPC(sp)
        MTC0    v1, CP0_EPC
        LONG_L  $31, PT_R31(sp)
        LONG_L  $28, PT_R28(sp)
        LONG_L  $25, PT_R25(sp)
#ifdef CONFIG_64BIT
        LONG_L  $8, PT_R8(sp)
        LONG_L  $9, PT_R9(sp)
#endif
        LONG_L  $7, PT_R7(sp)
        LONG_L  $6, PT_R6(sp)
        LONG_L  $5, PT_R5(sp)
        LONG_L  $4, PT_R4(sp)
        LONG_L  $3, PT_R3(sp)
        LONG_L  $2, PT_R2(sp)
        .set    pop
        .endm

        .macro  RESTORE_SP_AND_RET
        LONG_L  sp, PT_R29(sp)
        .set    mips3
        eret
        .set    mips0
        .endm

#endif
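
/*
 * Two flavours of return: the R3000/TX39 class has no eret instruction, so
 * it jumps through k0 (loaded from PT_EPC) with rfe in the branch delay
 * slot to restore the previous KU/IE bits, while later CPUs write the saved
 * EPC back to c0_epc and return with eret.
 */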

        .macro  RESTORE_SP
        LONG_L  sp, PT_R29(sp)
        .endm

        .macro  RESTORE_ALL
        RESTORE_TEMP
        RESTORE_STATIC
        RESTORE_AT
        RESTORE_SOME
        RESTORE_SP
        .endm

        .macro  RESTORE_ALL_AND_RET
        RESTORE_TEMP
        RESTORE_STATIC
        RESTORE_AT
        RESTORE_SOME
        RESTORE_SP_AND_RET
        .endm

/*
 * Move to kernel mode and disable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
        .macro  CLI
        mfc0    t0, CP0_STATUS
        li      t1, ST0_CU0 | 0x1f
        or      t0, t1
        xori    t0, 0x1f
        mtc0    t0, CP0_STATUS
        irq_disable_hazard
        .endm
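
/*
 * The or/xori pair forces the low five Status bits (IE, EXL, ERL and the
 * KSU field) to a known state: or-ing 0x1f sets them all, xor-ing 0x1f
 * clears them again, leaving the CPU in kernel mode with interrupts
 * disabled while ST0_CU0 stays set from the or.
 */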

/*
 * Move to kernel mode and enable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
        .macro  STI
        mfc0    t0, CP0_STATUS
        li      t1, ST0_CU0 | 0x1f
        or      t0, t1
        xori    t0, 0x1e
        mtc0    t0, CP0_STATUS
        irq_enable_hazard
        .endm
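
/*
 * Same trick as CLI, but the final xori uses 0x1e so the IE bit is left
 * set: kernel mode with interrupts enabled.
 */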

/*
 * Just move to kernel mode and leave interrupts as they are.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
        .macro  KMODE
        mfc0    t0, CP0_STATUS
        li      t1, ST0_CU0 | 0x1e
        or      t0, t1
        xori    t0, 0x1e
        mtc0    t0, CP0_STATUS
        irq_disable_hazard
        .endm
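
/*
 * Here both the or and the xori use 0x1e, so EXL/ERL/KSU are cleared but
 * the IE bit is left exactly as it was.
 */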

#endif /* _ASM_STACKFRAME_H */