arch/mips/kernel/r4k_switch.S
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
#undef fp

/*
 * Offset to the saved status flags of the current process; the first
 * 32 bytes of the stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)

#ifndef USE_ALTERNATE_RESUME_IMPL
/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti)
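 *
 * Saves the CP0 Status and the callee-saved registers of @prev into
 * its thread_struct, switches the kernel stack and thread_info
 * pointer ($28) over to @next, restores @next's registers and Status,
 * and returns @prev.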
 */
        .align  5
LEAF(resume)
        mfc0    t1, CP0_STATUS
        LONG_S  t1, THREAD_STATUS(a0)
        cpu_save_nonscratch a0
        LONG_S  ra, THREAD_REG31(a0)

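        /*
         * With !SMP the stack protector canary lives in the global
         * __stack_chk_guard, so it has to follow the task being
         * switched in: copy next's per-task canary over it.
         */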
#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
        PTR_LA  t8, __stack_chk_guard
        LONG_L  t9, TASK_STACK_CANARY(a1)
        LONG_S  t9, 0(t8)
#endif

        /*
         * The order in which the registers are restored below takes
         * care of the race between updating $28, $29 and kernelsp
         * without having to disable interrupts.
         */
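        /* $28 (gp) holds the current thread_info pointer while in the kernel */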
        move    $28, a2
        cpu_restore_nonscratch a1

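        /*
         * Record the top of next's kernel stack (minus the 32 byte
         * pad, see ST_OFF above) in kernelsp so the exception entry
         * code picks up the right stack on the next kernel entry.
         */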
        PTR_ADDU        t0, $28, _THREAD_SIZE - 32
        set_saved_sp    t0, t1, t2
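        /*
         * Build the new Status value: keep the live interrupt mask and
         * enable bits (0xff01 = IM7..IM0 | IE) from the current Status
         * and take everything else from next's saved Status.
         */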
        mfc0    t1, CP0_STATUS          /* Do we really need this? */
        li      a3, 0xff01
        and     t1, a3
        LONG_L  a2, THREAD_STATUS(a1)
        nor     a3, $0, a3
        and     a2, a3
        or      a2, t1
        mtc0    a2, CP0_STATUS
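        /* resume() returns the task we are switching away from */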
        move    v0, a0
        jr      ra
END(resume)

#endif /* USE_ALTERNATE_RESUME_IMPL */

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
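        /*
         * fpu_save_double checks Status.FR on CPUs that can run with a
         * 64-bit FPU register file, to decide whether the odd-numbered
         * FP registers need to be saved as well.
         */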
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
    defined(CONFIG_CPU_MIPS32_R6)
        mfc0    t0, CP0_STATUS
#endif
        fpu_save_double a0 t0 t1                # clobbers t1
        jr      ra
END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
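        /* as in _save_fp, Status.FR selects the FP register model */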
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
    defined(CONFIG_CPU_MIPS32_R6)
        mfc0    t0, CP0_STATUS
#endif
        fpu_restore_double a0 t0 t1             # clobbers t1
        jr      ra
END(_restore_fp)

#ifdef CONFIG_CPU_HAS_MSA

/*
 * Save a thread's MSA vector context.
 */
LEAF(_save_msa)
        msa_save_all    a0
        jr      ra
END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 */
LEAF(_restore_msa)
        msa_restore_all a0
        jr      ra
END(_restore_msa)

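/*
 * Initialise the upper halves of the MSA vector registers; the lower
 * 64 bits, which alias the scalar FP registers, are left untouched.
 */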
LEAF(_init_msa_upper)
        msa_init_all_upper
        jr      ra
END(_init_msa_upper)

#endif

/*
 * Load the FPU with signalling NaNs.  The bit pattern used here has the
 * property that it represents a signalling NaN no matter whether it is
 * interpreted as single or as double precision.
 *
 * The value to initialize fcr31 with is passed in $a0.
 */

        .set push
        SET_HARDFLOAT

LEAF(_init_fpu)
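        /* enable coprocessor 1 (and wait out the hazard) before touching FP state */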
        mfc0    t0, CP0_STATUS
        li      t1, ST0_CU1
        or      t0, t1
        mtc0    t0, CP0_STATUS
        enable_fpu_hazard

        ctc1    a0, fcr31

        li      t1, -1                          # SNaN

#ifdef CONFIG_64BIT
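        /*
         * Status.FR is bit 26; shifted left by 5 it lands in the sign
         * bit, so bgez means FR=0.  In that mode the odd-numbered FP
         * registers are not independently writable and are skipped;
         * the even ones are initialised further down.
         */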
        sll     t0, t0, 5
        bgez    t0, 1f                          # 16 / 32 register mode?

        dmtc1   t1, $f1
        dmtc1   t1, $f3
        dmtc1   t1, $f5
        dmtc1   t1, $f7
        dmtc1   t1, $f9
        dmtc1   t1, $f11
        dmtc1   t1, $f13
        dmtc1   t1, $f15
        dmtc1   t1, $f17
        dmtc1   t1, $f19
        dmtc1   t1, $f21
        dmtc1   t1, $f23
        dmtc1   t1, $f25
        dmtc1   t1, $f27
        dmtc1   t1, $f29
        dmtc1   t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
        mtc1    t1, $f0
        mtc1    t1, $f1
        mtc1    t1, $f2
        mtc1    t1, $f3
        mtc1    t1, $f4
        mtc1    t1, $f5
        mtc1    t1, $f6
        mtc1    t1, $f7
        mtc1    t1, $f8
        mtc1    t1, $f9
        mtc1    t1, $f10
        mtc1    t1, $f11
        mtc1    t1, $f12
        mtc1    t1, $f13
        mtc1    t1, $f14
        mtc1    t1, $f15
        mtc1    t1, $f16
        mtc1    t1, $f17
        mtc1    t1, $f18
        mtc1    t1, $f19
        mtc1    t1, $f20
        mtc1    t1, $f21
        mtc1    t1, $f22
        mtc1    t1, $f23
        mtc1    t1, $f24
        mtc1    t1, $f25
        mtc1    t1, $f26
        mtc1    t1, $f27
        mtc1    t1, $f28
        mtc1    t1, $f29
        mtc1    t1, $f30
        mtc1    t1, $f31

#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS32_R6)
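        /*
         * On MIPS32r2/r6 with a 64-bit FPU and Status.FR set, the mtc1
         * instructions above only wrote the lower 32 bits of each
         * register; use mthc1 to fill in the upper halves as well.
         */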
        .set    push
        .set    MIPS_ISA_LEVEL_RAW
        .set    fp=64
        sll     t0, t0, 5                       # is Status.FR set?
        bgez    t0, 1f                          # no: skip setting upper 32b

        mthc1   t1, $f0
        mthc1   t1, $f1
        mthc1   t1, $f2
        mthc1   t1, $f3
        mthc1   t1, $f4
        mthc1   t1, $f5
        mthc1   t1, $f6
        mthc1   t1, $f7
        mthc1   t1, $f8
        mthc1   t1, $f9
        mthc1   t1, $f10
        mthc1   t1, $f11
        mthc1   t1, $f12
        mthc1   t1, $f13
        mthc1   t1, $f14
        mthc1   t1, $f15
        mthc1   t1, $f16
        mthc1   t1, $f17
        mthc1   t1, $f18
        mthc1   t1, $f19
        mthc1   t1, $f20
        mthc1   t1, $f21
        mthc1   t1, $f22
        mthc1   t1, $f23
        mthc1   t1, $f24
        mthc1   t1, $f25
        mthc1   t1, $f26
        mthc1   t1, $f27
        mthc1   t1, $f28
        mthc1   t1, $f29
        mthc1   t1, $f30
        mthc1   t1, $f31
1:      .set    pop
#endif /* CONFIG_CPU_MIPS32_R2 || CONFIG_CPU_MIPS32_R6 */
#else
        .set    MIPS_ISA_ARCH_LEVEL_RAW
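        /*
         * dmtc1 writes a full 64-bit register: with FR=0 that covers
         * the even/odd pairs, and with FR=1 the odd registers were
         * already initialised above.
         */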
        dmtc1   t1, $f0
        dmtc1   t1, $f2
        dmtc1   t1, $f4
        dmtc1   t1, $f6
        dmtc1   t1, $f8
        dmtc1   t1, $f10
        dmtc1   t1, $f12
        dmtc1   t1, $f14
        dmtc1   t1, $f16
        dmtc1   t1, $f18
        dmtc1   t1, $f20
        dmtc1   t1, $f22
        dmtc1   t1, $f24
        dmtc1   t1, $f26
        dmtc1   t1, $f28
        dmtc1   t1, $f30
#endif
        jr      ra
END(_init_fpu)

        .set pop        /* SET_HARDFLOAT */