Commit | Line | Data |
---|---|---|
be7baf80 TG |
1 | #ifndef __ASM_X86_MSR_H_ |
2 | #define __ASM_X86_MSR_H_ | |
3 | ||
4 | #include <asm/msr-index.h> | |
5 | ||
d43a3312 MF |
6 | #ifndef __ASSEMBLY__ |
7 | # include <linux/types.h> | |
8 | #endif | |
9 | ||
8f12dea6 GOC |
10 | #ifdef __KERNEL__ |
11 | #ifndef __ASSEMBLY__ | |
c210d249 GOC |
12 | |
13 | #include <asm/asm.h> | |
14 | #include <asm/errno.h> | |
15 | ||
1e160cc3 | 16 | static inline unsigned long long native_read_tscp(unsigned int *aux) |
8f12dea6 GOC |
17 | { |
18 | unsigned long low, high; | |
abb0ade0 JP |
19 | asm volatile(".byte 0x0f,0x01,0xf9" |
20 | : "=a" (low), "=d" (high), "=c" (*aux)); | |
41aefdcc | 21 | return low | ((u64)high << 32); |
8f12dea6 GOC |
22 | } |
23 | ||
/*
 * i386 calling convention returns 64-bit value in edx:eax, while
 * x86_64 returns at rax.  Also, the "A" constraint does not really
 * mean rdx:rax in x86_64, so we need specialized behaviour for each
 * architecture.
 */
#ifdef CONFIG_X86_64
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	(((u64)(high) << 32) | (low))
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif
41 | ||
be7baf80 TG |
42 | static inline unsigned long long native_read_msr(unsigned int msr) |
43 | { | |
c210d249 | 44 | DECLARE_ARGS(val, low, high); |
be7baf80 | 45 | |
c210d249 GOC |
46 | asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr)); |
47 | return EAX_EDX_VAL(val, low, high); | |
be7baf80 TG |
48 | } |
49 | ||
50 | static inline unsigned long long native_read_msr_safe(unsigned int msr, | |
51 | int *err) | |
52 | { | |
c210d249 | 53 | DECLARE_ARGS(val, low, high); |
be7baf80 | 54 | |
56ec1ddc | 55 | asm volatile("2: rdmsr ; xor %0,%0\n" |
be7baf80 TG |
56 | "1:\n\t" |
57 | ".section .fixup,\"ax\"\n\t" | |
56ec1ddc | 58 | "3: mov %3,%0 ; jmp 1b\n\t" |
be7baf80 | 59 | ".previous\n\t" |
abb0ade0 | 60 | _ASM_EXTABLE(2b, 3b) |
c210d249 | 61 | : "=r" (*err), EAX_EDX_RET(val, low, high) |
be7baf80 | 62 | : "c" (msr), "i" (-EFAULT)); |
c210d249 | 63 | return EAX_EDX_VAL(val, low, high); |
be7baf80 TG |
64 | } |
65 | ||
c9dcda5c GOC |
66 | static inline void native_write_msr(unsigned int msr, |
67 | unsigned low, unsigned high) | |
be7baf80 | 68 | { |
af2b1c60 | 69 | asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory"); |
be7baf80 TG |
70 | } |
71 | ||
72 | static inline int native_write_msr_safe(unsigned int msr, | |
c9dcda5c | 73 | unsigned low, unsigned high) |
be7baf80 TG |
74 | { |
75 | int err; | |
56ec1ddc | 76 | asm volatile("2: wrmsr ; xor %0,%0\n" |
be7baf80 TG |
77 | "1:\n\t" |
78 | ".section .fixup,\"ax\"\n\t" | |
56ec1ddc | 79 | "3: mov %4,%0 ; jmp 1b\n\t" |
be7baf80 | 80 | ".previous\n\t" |
abb0ade0 | 81 | _ASM_EXTABLE(2b, 3b) |
be7baf80 | 82 | : "=a" (err) |
c9dcda5c | 83 | : "c" (msr), "0" (low), "d" (high), |
af2b1c60 JF |
84 | "i" (-EFAULT) |
85 | : "memory"); | |
be7baf80 TG |
86 | return err; |
87 | } | |
88 | ||
cdc7957d | 89 | extern unsigned long long native_read_tsc(void); |
be7baf80 | 90 | |
92767af0 IM |
91 | static __always_inline unsigned long long __native_read_tsc(void) |
92 | { | |
93 | DECLARE_ARGS(val, low, high); | |
94 | ||
95 | rdtsc_barrier(); | |
96 | asm volatile("rdtsc" : EAX_EDX_RET(val, low, high)); | |
97 | rdtsc_barrier(); | |
98 | ||
99 | return EAX_EDX_VAL(val, low, high); | |
100 | } | |
101 | ||
b8d1fae7 | 102 | static inline unsigned long long native_read_pmc(int counter) |
be7baf80 | 103 | { |
c210d249 GOC |
104 | DECLARE_ARGS(val, low, high); |
105 | ||
106 | asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter)); | |
107 | return EAX_EDX_VAL(val, low, high); | |
be7baf80 TG |
108 | } |
109 | ||
110 | #ifdef CONFIG_PARAVIRT | |
111 | #include <asm/paravirt.h> | |
96a388de | 112 | #else |
be7baf80 TG |
113 | #include <linux/errno.h> |
114 | /* | |
115 | * Access to machine-specific registers (available on 586 and better only) | |
116 | * Note: the rd* operations modify the parameters directly (without using | |
117 | * pointer indirection), this allows gcc to optimize better | |
118 | */ | |
119 | ||
/* Read MSR @msr, splitting the 64-bit result into @val1 (low 32 bits)
 * and @val2 (high 32 bits).  Unchecked: faults on a bad MSR. */
#define rdmsr(msr, val1, val2)					\
do {								\
	u64 __msrval = native_read_msr((msr));			\
	(val1) = (u32)__msrval;					\
	(val2) = (u32)(__msrval >> 32);				\
} while (0)
be7baf80 | 126 | |
/* Split-register MSR write; thin wrapper over native_write_msr(). */
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
131 | ||
/* Read MSR @msr as a single 64-bit value into @val. */
#define rdmsrl(msr, val)					\
	((val) = native_read_msr((msr)))

/* Write 64-bit @val to MSR @msr, splitting it into eax/edx halves. */
#define wrmsrl(msr, val)					\
	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
137 | |
/* wrmsr with exception handling: returns 0 on success, -EFAULT on fault */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}
143 | ||
/*
 * rdmsr with exception handling: stores the low/high halves through
 * pointers @p1/@p2 and evaluates to 0 on success or -EFAULT on fault.
 *
 * Fix: parenthesize the @p1/@p2 macro arguments before dereferencing.
 * The previous (*p1) form mis-binds for non-trivial arguments such as
 * conditional or cast expressions.
 */
#define rdmsr_safe(msr, p1, p2)					\
({								\
	int __err;						\
	u64 __val = native_read_msr_safe((msr), &__err);	\
	(*(p1)) = (u32)__val;					\
	(*(p2)) = (u32)(__val >> 32);				\
	__err;							\
})
be7baf80 | 153 | |
/* rdmsr with exception handling; 64-bit result through *p, error code
 * (0 or -EFAULT) returned. */
static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
{
	int ret;

	*p = native_read_msr_safe(msr, &ret);
	return ret;
}
161 | ||
/* Read only the low 32 bits of the TSC into @low. */
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

/* Read the full 64-bit TSC into @val. */
#define rdtscll(val)						\
	((val) = native_read_tsc())
167 | ||
/* Read PMC @counter, splitting the result into @low / @high. */
#define rdpmc(counter, low, high)				\
do {								\
	u64 _pmcval = native_read_pmc((counter));		\
	(low) = (u32)_pmcval;					\
	(high) = (u32)(_pmcval >> 32);				\
} while (0)
be7baf80 | 174 | |
/* RDTSCP: TSC split into @low / @high, IA32_TSC_AUX into @aux. */
#define rdtscp(low, high, aux)					\
do {								\
	unsigned long long _tscval = native_read_tscp(&(aux));	\
	(low) = (u32)_tscval;					\
	(high) = (u32)(_tscval >> 32);				\
} while (0)

/* RDTSCP: full 64-bit TSC into @val, IA32_TSC_AUX into @aux. */
#define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
be7baf80 | 183 | |
c210d249 | 184 | #endif /* !CONFIG_PARAVIRT */ |
be7baf80 | 185 | |
be7baf80 | 186 | |
/* Fault-checked 64-bit MSR write; evaluates to 0 or -EFAULT. */
#define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val), \
					     (u32)((val) >> 32))

/* 0x10 is presumably MSR_IA32_TSC — confirm against msr-index.h */
#define write_tsc(val1, val2) wrmsr(0x10, (val1), (val2))

/* 0xc0000103 is presumably MSR_TSC_AUX — confirm against msr-index.h */
#define write_rdtscp_aux(val) wrmsr(0xc0000103, (val), 0)
be7baf80 | 193 | |
be7baf80 TG |
194 | #ifdef CONFIG_SMP |
195 | void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); | |
196 | void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); | |
197 | int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); | |
abb0ade0 | 198 | |
be7baf80 TG |
199 | int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); |
200 | #else /* CONFIG_SMP */ | |
201 | static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h) | |
202 | { | |
203 | rdmsr(msr_no, *l, *h); | |
204 | } | |
205 | static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h) | |
206 | { | |
207 | wrmsr(msr_no, l, h); | |
208 | } | |
abb0ade0 JP |
209 | static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, |
210 | u32 *l, u32 *h) | |
be7baf80 TG |
211 | { |
212 | return rdmsr_safe(msr_no, l, h); | |
213 | } | |
214 | static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h) | |
215 | { | |
216 | return wrmsr_safe(msr_no, l, h); | |
217 | } | |
218 | #endif /* CONFIG_SMP */ | |
751de83c | 219 | #endif /* __ASSEMBLY__ */ |
c210d249 GOC |
220 | #endif /* __KERNEL__ */ |
221 | ||
be7baf80 | 222 | |
96a388de | 223 | #endif |