Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | #ifndef __ARCH_S390_PERCPU__ |
2 | #define __ARCH_S390_PERCPU__ | |
3 | ||
4c2241fd HC |
4 | #include <linux/preempt.h> |
5 | #include <asm/cmpxchg.h> | |
6 | ||
/*
 * s390 uses its own implementation for per cpu data, the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 */
#define __my_cpu_offset S390_lowcore.percpu_offset

/*
 * For 64 bit module code, the module may be more than 4G above the
 * per cpu area, use weak definitions to force the compiler to
 * generate external references.
 *
 * NOTE(review): only relevant for SMP s390x modules — built-in code and
 * 31 bit builds can use the normal direct per cpu references.
 */
#if defined(CONFIG_SMP) && defined(__s390x__) && defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif
21 | ||
/*
 * Generic read-modify-write of the per cpu variable @pcp: applies the
 * binary operator @op with @val via a cmpxchg retry loop.
 *
 * Preemption is disabled so the per cpu pointer keeps referring to the
 * local cpu's data for the whole operation; cmpxchg/cmpxchg64 makes the
 * update itself atomic with respect to interrupts on that cpu.
 *
 * Fix: @val is now evaluated exactly once, before the retry loop.  The
 * previous version expanded (val) inside the loop, so an argument with
 * side effects was evaluated once per cmpxchg retry.
 */
#define arch_this_cpu_to_op(pcp, val, op)				\
do {									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable();						\
	ptr__ = __this_cpu_ptr(&(pcp));					\
	prev__ = *ptr__;						\
	do {								\
		old__ = prev__;						\
		new__ = old__ op val__;					\
		switch (sizeof(*ptr__)) {				\
		case 8:							\
			/* 64 bit needs the doubleword cmpxchg variant */ \
			prev__ = cmpxchg64(ptr__, old__, new__);	\
			break;						\
		default:						\
			prev__ = cmpxchg(ptr__, old__, new__);		\
		}							\
	} while (prev__ != old__);					\
	preempt_enable();						\
} while (0)
43 | ||
/*
 * Size-specific this_cpu arithmetic and bitwise operations; all map to
 * the same cmpxchg based read-modify-write helper regardless of width.
 */
#define this_cpu_add_1(pcp, val) arch_this_cpu_to_op(pcp, val, +)
#define this_cpu_add_2(pcp, val) arch_this_cpu_to_op(pcp, val, +)
#define this_cpu_add_4(pcp, val) arch_this_cpu_to_op(pcp, val, +)
#define this_cpu_add_8(pcp, val) arch_this_cpu_to_op(pcp, val, +)

#define this_cpu_and_1(pcp, val) arch_this_cpu_to_op(pcp, val, &)
#define this_cpu_and_2(pcp, val) arch_this_cpu_to_op(pcp, val, &)
#define this_cpu_and_4(pcp, val) arch_this_cpu_to_op(pcp, val, &)
#define this_cpu_and_8(pcp, val) arch_this_cpu_to_op(pcp, val, &)

#define this_cpu_or_1(pcp, val) arch_this_cpu_to_op(pcp, val, |)
#define this_cpu_or_2(pcp, val) arch_this_cpu_to_op(pcp, val, |)
#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op(pcp, val, |)
#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op(pcp, val, |)

#define this_cpu_xor_1(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
#define this_cpu_xor_2(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
#define this_cpu_xor_4(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
#define this_cpu_xor_8(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
4c2241fd | 63 | |
/*
 * Compare-and-exchange on the per cpu variable @pcp: if *@pcp equals
 * @oval it is replaced by @nval; the previous value is returned either
 * way.  Preemption is disabled so the per cpu pointer stays bound to
 * the local cpu for the duration of the exchange.
 */
#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ *p__;						\
	pcp_op_T__ res__;						\
	preempt_disable();						\
	p__ = __this_cpu_ptr(&(pcp));					\
	switch (sizeof(*p__)) {						\
	case 8:								\
		/* doubleword variant for 64 bit values */		\
		res__ = cmpxchg64(p__, oval, nval);			\
		break;							\
	default:							\
		res__ = cmpxchg(p__, oval, nval);			\
	}								\
	preempt_enable();						\
	res__;								\
})
81 | ||
/* cmpxchg for every per cpu variable width, 1 to 8 bytes. */
#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
4c2241fd | 86 | |
f0343474 | 87 | #include <asm-generic/percpu.h> |
1da177e4 | 88 | |
1da177e4 | 89 | #endif /* __ARCH_S390_PERCPU__ */ |