#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 */

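/*
 * mb(), rmb() and wmb() below are the mandatory barriers: they order
 * accesses even with respect to devices and DMA, so they cannot be
 * relaxed on !SMP kernels, unlike the smp_*() variants that
 * <asm-generic/barrier.h> derives from the __smp_*() macros further down.
 */
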
#ifdef CONFIG_X86_32
#define mb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "lfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "sfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#else
#define mb()	asm volatile("mfence" ::: "memory")
#define rmb()	asm volatile("lfence" ::: "memory")
#define wmb()	asm volatile("sfence" ::: "memory")
#endif
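
/*
 * On 32-bit kernels the ALTERNATIVE() above patches in the SSE2
 * mfence/lfence/sfence instructions once X86_FEATURE_XMM2 is detected;
 * pre-SSE2 CPUs keep the "lock; addl $0,0(%%esp)" fallback, a locked
 * read-modify-write on the stack that also acts as a full memory barrier.
 */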

#ifdef CONFIG_X86_PPRO_FENCE
#define dma_rmb()	rmb()
#else
#define dma_rmb()	barrier()
#endif
#define dma_wmb()	barrier()

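/*
 * A minimal sketch of how dma_rmb()/dma_wmb() are typically paired around
 * a descriptor in coherent DMA memory; desc, its fields, and the helper
 * calls below are hypothetical names, not part of this header:
 *
 *	if (desc->status & DESC_OWNED_BY_CPU) {
 *		dma_rmb();		read status before reading data
 *		process(desc->data);
 *		desc->data = prepare_next();
 *		dma_wmb();		publish data before handing back
 *		desc->status = 0;	descriptor now owned by the device
 *	}
 */
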
#define __smp_mb()	mb()
#define __smp_rmb()	dma_rmb()
#define __smp_wmb()	barrier()
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)

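/*
 * __smp_store_mb() leans on xchg(): an xchg with a memory operand is
 * implicitly LOCK-prefixed on x86 and therefore acts as a full memory
 * barrier, folding the store and the mb() into a single instruction.
 */
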
#if defined(CONFIG_X86_PPRO_FENCE)

/*
 * With this option x86 doesn't have a strong TSO memory
 * model and we fall back to full barriers.
 */

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	___p1;								\
})

#else /* regular x86 TSO memory ordering */

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

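/*
 * Under the regular x86-TSO model the only hardware reordering is a later
 * load passing an earlier store, and neither acquire nor release ordering
 * cares about that pair, so the plain barrier() above only has to stop
 * the compiler from reordering.
 */
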
#endif

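/*
 * A minimal producer/consumer sketch, assuming <asm-generic/barrier.h>
 * maps smp_store_release()/smp_load_acquire() onto the __smp_*() macros
 * above; data and ready are hypothetical shared variables:
 *
 *	CPU 0					CPU 1
 *	data = 42;				while (!smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);			cpu_relax();
 *						r = data;	r is guaranteed to be 42
 */
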
/* Atomic operations are already serializing on x86 */
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()

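/*
 * Only a compiler barrier is needed around atomics because LOCK-prefixed
 * read-modify-write instructions already order memory accesses like a
 * full barrier; there is nothing extra for the CPU to do here.
 */
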
#include <asm-generic/barrier.h>

#endif /* _ASM_X86_BARRIER_H */