Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | #ifndef _ASM_GENERIC_PERCPU_H_ |
2 | #define _ASM_GENERIC_PERCPU_H_ | |
5028eaa9 | 3 | |
1da177e4 | 4 | #include <linux/compiler.h> |
ae1ee11b | 5 | #include <linux/threads.h> |
5028eaa9 | 6 | #include <linux/percpu-defs.h> |
acdac872 | 7 | |
1da177e4 LT |
8 | #ifdef CONFIG_SMP |
9 | ||
acdac872 | 10 | /* |
11 | * per_cpu_offset() is the offset that has to be added to a | |
12 | * percpu variable to get to the instance for a certain processor. | |
13 | * | |
14 | * Most arches use the __per_cpu_offset array for those offsets but | |
15 | * some arches have their own ways of determining the offset (x86_64, s390). | |
16 | */ | |
17 | #ifndef __per_cpu_offset | |
1da177e4 LT |
18 | extern unsigned long __per_cpu_offset[NR_CPUS]; |
19 | ||
a875a69f | 20 | #define per_cpu_offset(x) (__per_cpu_offset[x]) |
acdac872 | 21 | #endif |
22 | ||
23 | /* | |
24 | * Determine the offset for the currently active processor. | |
25 | * An arch may define __my_cpu_offset to provide a more effective | |
26 | * means of obtaining the offset to the per cpu variables of the | |
27 | * current processor. | |
28 | */ | |
29 | #ifndef __my_cpu_offset | |
30 | #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id()) | |
1e835278 HD |
31 | #endif |
32 | #ifdef CONFIG_DEBUG_PREEMPT | |
acdac872 | 33 | #define my_cpu_offset per_cpu_offset(smp_processor_id()) |
34 | #else | |
35 | #define my_cpu_offset __my_cpu_offset | |
36 | #endif | |
37 | ||
38 | /* | |
39 | * Add a offset to a pointer but keep the pointer as is. | |
40 | * | |
41 | * Only S390 provides its own means of moving the pointer. | |
42 | */ | |
43 | #ifndef SHIFT_PERCPU_PTR | |
44 | #define SHIFT_PERCPU_PTR(__p, __offset) RELOC_HIDE((__p), (__offset)) | |
45 | #endif | |
a875a69f | 46 | |
acdac872 | 47 | /* |
dd5af90a | 48 | * A percpu variable may point to discarded regions. The following are |
acdac872 | 49 | * established ways to produce a usable pointer from the percpu variable |
50 | * offset. | |
51 | */ | |
52 | #define per_cpu(var, cpu) \ | |
53 | (*SHIFT_PERCPU_PTR(&per_cpu_var(var), per_cpu_offset(cpu))) | |
54 | #define __get_cpu_var(var) \ | |
55 | (*SHIFT_PERCPU_PTR(&per_cpu_var(var), my_cpu_offset)) | |
56 | #define __raw_get_cpu_var(var) \ | |
57 | (*SHIFT_PERCPU_PTR(&per_cpu_var(var), __my_cpu_offset)) | |
58 | ||
59 | ||
dd5af90a | 60 | #ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA |
acdac872 | 61 | extern void setup_per_cpu_areas(void); |
62 | #endif | |
1da177e4 | 63 | |
1da177e4 LT |
64 | #else /* ! SMP */ |
65 | ||
acdac872 | 66 | #define per_cpu(var, cpu) (*((void)(cpu), &per_cpu_var(var))) |
67 | #define __get_cpu_var(var) per_cpu_var(var) | |
68 | #define __raw_get_cpu_var(var) per_cpu_var(var) | |
1da177e4 LT |
69 | |
70 | #endif /* SMP */ | |
71 | ||
9b8de747 DH |
72 | #ifndef PER_CPU_BASE_SECTION |
73 | #ifdef CONFIG_SMP | |
74 | #define PER_CPU_BASE_SECTION ".data.percpu" | |
75 | #else | |
76 | #define PER_CPU_BASE_SECTION ".data" | |
77 | #endif | |
78 | #endif | |
79 | ||
80 | #ifdef CONFIG_SMP | |
81 | ||
82 | #ifdef MODULE | |
83 | #define PER_CPU_SHARED_ALIGNED_SECTION "" | |
84 | #else | |
85 | #define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned" | |
86 | #endif | |
87 | #define PER_CPU_FIRST_SECTION ".first" | |
88 | ||
89 | #else | |
90 | ||
91 | #define PER_CPU_SHARED_ALIGNED_SECTION "" | |
92 | #define PER_CPU_FIRST_SECTION "" | |
93 | ||
94 | #endif | |
95 | ||
acdac872 | 96 | #ifndef PER_CPU_ATTRIBUTES |
97 | #define PER_CPU_ATTRIBUTES | |
98 | #endif | |
99 | ||
1da177e4 | 100 | #endif /* _ASM_GENERIC_PERCPU_H_ */ |