i386: Remove strrchr assembler implementation
[deliverable/linux.git] / include / asm-sh64 / system.h
1 #ifndef __ASM_SH64_SYSTEM_H
2 #define __ASM_SH64_SYSTEM_H
3
4 /*
5 * This file is subject to the terms and conditions of the GNU General Public
6 * License. See the file "COPYING" in the main directory of this archive
7 * for more details.
8 *
9 * include/asm-sh64/system.h
10 *
11 * Copyright (C) 2000, 2001 Paolo Alberelli
12 * Copyright (C) 2003 Paul Mundt
13 * Copyright (C) 2004 Richard Curnow
14 *
15 */
16
17 #include <asm/registers.h>
18 #include <asm/processor.h>
19
20 /*
21 * switch_to() should switch tasks to task nr n, first
22 */
23
/*
 * Address-space limit marker used by the get_fs()/set_fs() machinery
 * (kernel vs. user segment) — NOTE(review): exact users not visible in
 * this header; confirm against uaccess code.
 */
typedef struct {
	unsigned long seg;
} mm_segment_t;

/*
 * Low-level task switch, defined outside this header.  Returns the task
 * that was previously running so switch_to() can record it in `last`.
 */
extern struct task_struct *sh64_switch_to(struct task_struct *prev,
					  struct thread_struct *prev_thread,
					  struct task_struct *next,
					  struct thread_struct *next_thread);
32
/*
 * Switch from task `prev` to task `next`; `last` receives the task we
 * actually switched away from (as returned by sh64_switch_to()).
 *
 * If `next` was not the last task to use the FPU, set SR.FD in its
 * saved status register — presumably so its first FP instruction traps
 * and FPU state can be restored lazily (lazy-FPU convention; confirm
 * against the FPU trap handler).
 */
#define switch_to(prev,next,last) \
do {\
	if (last_task_used_math != next) {\
		struct pt_regs *regs = next->thread.uregs;\
		if (regs) regs->sr |= SR_FD;\
	}\
	last = sh64_switch_to(prev, &prev->thread, next, &next->thread);\
} while(0)
41
/* Emit a single no-op instruction (no "memory" clobber: not a compiler barrier). */
#define nop() __asm__ __volatile__ ("nop")

/*
 * Exchange *ptr with x, returning the old value; dispatches on
 * sizeof(*ptr) via __xchg().
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

/*
 * Deliberately never defined: a reference to it from __xchg() with an
 * unsupported operand size produces a link-time error.
 */
extern void __xchg_called_with_bad_pointer(void);
47
/* Memory barriers: implemented with the "synco" instruction plus a
 * compiler barrier ("memory" clobber).  rmb() reuses the full mb(). */
#define mb() __asm__ __volatile__ ("synco": : :"memory")
#define rmb() mb()
#define wmb() __asm__ __volatile__ ("synco": : :"memory")
#define read_barrier_depends() do { } while (0)

#ifdef CONFIG_SMP
/* On SMP the smp_* barriers must order accesses between CPUs. */
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#else
/* On UP a compiler barrier is sufficient. */
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#define smp_read_barrier_depends() do { } while (0)
#endif /* CONFIG_SMP */

/* Store `value` to `var` with barrier semantics, via an atomic xchg. */
#define set_rmb(var, value) do { (void)xchg(&var, value); } while (0)
#define set_mb(var, value) set_rmb(var, value)
67
/* Interrupt Control: which SR bits local_irq_*() manipulate.
 * Without HARD_CLI: bits 4-7 (0xf0) — presumably the SR.IMASK field.
 * With HARD_CLI: bit 28 (0x10000000) — presumably SR.BL (block bit).
 * NOTE(review): bit meanings inferred from the values and the "SR.BL"
 * comments below; confirm against the SH-5 architecture manual. */
#ifndef HARD_CLI
#define SR_MASK_L 0x000000f0L
#define SR_MASK_LL 0x00000000000000f0LL
#else
#define SR_MASK_L 0x10000000L
#define SR_MASK_LL 0x0000000010000000LL
#endif
76
/*
 * Enable local interrupts: read SR, clear the interrupt-mask bits
 * (AND with ~SR_MASK_LL), and write SR back.
 */
static __inline__ void local_irq_enable(void)
{
	/* cli/sti based on SR.BL */
	unsigned long long __dummy0, __dummy1=~SR_MASK_LL;

	__asm__ __volatile__("getcon " __SR ", %0\n\t"
			     "and %0, %1, %0\n\t"
			     "putcon %0, " __SR "\n\t"
			     : "=&r" (__dummy0)
			     : "r" (__dummy1));
}
88
/*
 * Disable local interrupts: read SR, set the interrupt-mask bits
 * (OR with SR_MASK_LL), and write SR back.
 */
static __inline__ void local_irq_disable(void)
{
	/* cli/sti based on SR.BL */
	unsigned long long __dummy0, __dummy1=SR_MASK_LL;
	__asm__ __volatile__("getcon " __SR ", %0\n\t"
			     "or %0, %1, %0\n\t"
			     "putcon %0, " __SR "\n\t"
			     : "=&r" (__dummy0)
			     : "r" (__dummy1));
}
99
/*
 * Read SR into x, keeping only the interrupt-mask bits.  Result is
 * non-zero iff interrupts are currently masked (see irqs_disabled()).
 */
#define local_save_flags(x) \
(__extension__ ({	unsigned long long __dummy=SR_MASK_LL;	\
	__asm__ __volatile__(				\
		"getcon " __SR ", %0\n\t"		\
		"and %0, %1, %0"			\
		: "=&r" (x)				\
		: "r" (__dummy));}))
107
/*
 * Atomically save the current interrupt state into x and disable
 * interrupts.  Asm walk-through: copy SR into x (or with r63, the
 * zero register), set the mask bits in SR, then reduce x to just the
 * mask bits so local_irq_restore() can test them against SR_MASK_L.
 *
 * Fix: dropped the stray trailing ';' that was inside the macro body.
 * It made `local_irq_save(x);` expand to two statements, which breaks
 * unbraced if/else callers and is inconsistent with local_save_flags()
 * above.
 */
#define local_irq_save(x) \
(__extension__ ({	unsigned long long __d2=SR_MASK_LL, __d1;	\
	__asm__ __volatile__(          	\
		"getcon " __SR ", %1\n\t" 	\
		"or %1, r63, %0\n\t"		\
		"or %1, %2, %1\n\t"		\
		"putcon %1, " __SR "\n\t"	\
		"and %0, %2, %0"    		\
		: "=&r" (x), "=&r" (__d1)	\
		: "r" (__d2));}))
118
/*
 * Restore interrupt state saved by local_irq_save(): if the saved mask
 * bits were clear (interrupts were enabled), re-enable them.  Note the
 * asymmetry: it never disables — local_irq_save() already left
 * interrupts masked, so only the enable path is needed.
 */
#define local_irq_restore(x) do { \
	if ( ((x) & SR_MASK_L) == 0 )		/* dropping to 0 ? */	\
		local_irq_enable();		/* yes...re-enable */	\
} while (0)
123
/*
 * Non-zero when local interrupts are masked: local_save_flags() leaves
 * only the SR mask bits in `flags`, so any non-zero value means
 * "disabled".
 */
#define irqs_disabled()			\
({					\
	unsigned long flags;		\
	local_save_flags(flags);	\
	(flags != 0);			\
})
130
/*
 * Swap the 32-bit value at *m with val and return the previous value.
 * Atomicity is achieved by blocking local interrupts around the
 * read-modify-write (UP-style exchange, no SMP-safe instruction used).
 */
static inline unsigned long xchg_u32(volatile int * m, unsigned long val)
{
	unsigned long irq_state;
	unsigned long old;

	local_irq_save(irq_state);
	old = *m;
	*m = val;
	local_irq_restore(irq_state);

	return old;
}
141
/*
 * Swap the byte at *m with the low 8 bits of val and return the
 * previous byte.  Like xchg_u32(), made atomic against local
 * interrupts by disabling them around the read-modify-write.
 */
static inline unsigned long xchg_u8(volatile unsigned char * m, unsigned long val)
{
	unsigned long irq_state;
	unsigned long old;

	local_irq_save(irq_state);
	old = *m;
	*m = val & 0xff;
	local_irq_restore(irq_state);

	return old;
}
152
/*
 * Size-dispatch helper behind the xchg() macro.  Supports 4-byte and
 * 1-byte operands; any other size falls through to a call of the
 * deliberately-undefined __xchg_called_with_bad_pointer(), turning a
 * bad xchg() use into a link-time error.
 *
 * Fix: removed the unreachable `break` statements that followed the
 * `return`s (dead code).
 */
static __inline__ unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 4:
		return xchg_u32(ptr, x);
	case 1:
		return xchg_u8(ptr, x);
	}
	/* Unsupported operand size: force a link failure. */
	__xchg_called_with_bad_pointer();
	return x;
}
166
/* XXX
 * disable hlt during certain critical i/o operations
 */
/* Advertise that this arch provides disable_hlt()/enable_hlt()
 * (implementations live elsewhere — not in this header). */
#define HAVE_DISABLE_HLT
void disable_hlt(void);
void enable_hlt(void);
173
174
/*
 * NOTE(review): duplicate unconditional definitions of smp_mb(),
 * smp_rmb() and smp_wmb() removed here.  They are already defined
 * earlier in this file under the CONFIG_SMP conditional; redefining
 * them unconditionally to barrier() caused redefinition warnings and
 * silently weakened the barriers to compiler-only on SMP builds.
 */
178
#ifdef CONFIG_SH_ALPHANUMERIC
/* This is only used for debugging. */
extern void print_seg(char *file,int line);
/* Debug aid: print segment state tagged with the call site. */
#define PLS() print_seg(__FILE__,__LINE__)
#else  /* CONFIG_SH_ALPHANUMERIC */
#define PLS()
#endif /* CONFIG_SH_ALPHANUMERIC */

/* Trace marker: log file, function and line of the call site. */
#define PL() printk("@ <%s,%s:%d>\n",__FILE__,__FUNCTION__,__LINE__)

/* Identity: no arch-specific stack-top adjustment/randomization here. */
#define arch_align_stack(x) (x)

#endif /* __ASM_SH64_SYSTEM_H */
This page took 0.035052 seconds and 5 git commands to generate.