Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | #ifndef __ASM_MSR_H |
2 | #define __ASM_MSR_H | |
3 | ||
90a0a06a RR |
4 | #include <asm/errno.h> |
5 | ||
6 | static inline unsigned long long native_read_msr(unsigned int msr) | |
7 | { | |
8 | unsigned long long val; | |
9 | ||
10 | asm volatile("rdmsr" : "=A" (val) : "c" (msr)); | |
11 | return val; | |
12 | } | |
13 | ||
14 | static inline unsigned long long native_read_msr_safe(unsigned int msr, | |
15 | int *err) | |
16 | { | |
17 | unsigned long long val; | |
18 | ||
19 | asm volatile("2: rdmsr ; xorl %0,%0\n" | |
20 | "1:\n\t" | |
21 | ".section .fixup,\"ax\"\n\t" | |
22 | "3: movl %3,%0 ; jmp 1b\n\t" | |
23 | ".previous\n\t" | |
24 | ".section __ex_table,\"a\"\n" | |
25 | " .align 4\n\t" | |
26 | " .long 2b,3b\n\t" | |
27 | ".previous" | |
28 | : "=r" (*err), "=A" (val) | |
29 | : "c" (msr), "i" (-EFAULT)); | |
30 | ||
31 | return val; | |
32 | } | |
33 | ||
34 | static inline void native_write_msr(unsigned int msr, unsigned long long val) | |
35 | { | |
36 | asm volatile("wrmsr" : : "c" (msr), "A"(val)); | |
37 | } | |
38 | ||
39 | static inline int native_write_msr_safe(unsigned int msr, | |
40 | unsigned long long val) | |
41 | { | |
42 | int err; | |
43 | asm volatile("2: wrmsr ; xorl %0,%0\n" | |
44 | "1:\n\t" | |
45 | ".section .fixup,\"ax\"\n\t" | |
46 | "3: movl %4,%0 ; jmp 1b\n\t" | |
47 | ".previous\n\t" | |
48 | ".section __ex_table,\"a\"\n" | |
49 | " .align 4\n\t" | |
50 | " .long 2b,3b\n\t" | |
51 | ".previous" | |
52 | : "=a" (err) | |
53 | : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)), | |
54 | "i" (-EFAULT)); | |
55 | return err; | |
56 | } | |
57 | ||
58 | static inline unsigned long long native_read_tsc(void) | |
59 | { | |
60 | unsigned long long val; | |
61 | asm volatile("rdtsc" : "=A" (val)); | |
62 | return val; | |
63 | } | |
64 | ||
65 | static inline unsigned long long native_read_pmc(void) | |
66 | { | |
67 | unsigned long long val; | |
68 | asm volatile("rdpmc" : "=A" (val)); | |
69 | return val; | |
70 | } | |
71 | ||
d3561b7f RR |
72 | #ifdef CONFIG_PARAVIRT |
73 | #include <asm/paravirt.h> | |
74 | #else | |
75 | ||
1da177e4 LT |
76 | /* |
77 | * Access to machine-specific registers (available on 586 and better only) | |
78 | * Note: the rd* operations modify the parameters directly (without using | |
79 | * pointer indirection); this allows gcc to optimize better | |
80 | */ | |
81 | ||
90a0a06a RR |
82 | #define rdmsr(msr,val1,val2) \ |
83 | do { \ | |
84 | unsigned long long __val = native_read_msr(msr); \ | |
85 | val1 = __val; \ | |
86 | val2 = __val >> 32; \ | |
87 | } while(0) | |
1da177e4 | 88 | |
90a0a06a RR |
89 | #define wrmsr(msr,val1,val2) \ |
90 | native_write_msr(msr, ((unsigned long long)val2 << 32) | val1) | |
1da177e4 | 91 | |
90a0a06a RR |
92 | #define rdmsrl(msr,val) \ |
93 | do { \ | |
94 | (val) = native_read_msr(msr); \ | |
95 | } while(0) | |
1da177e4 LT |
96 | |
97 | static inline void wrmsrl (unsigned long msr, unsigned long long val) | |
98 | { | |
99 | unsigned long lo, hi; | |
100 | lo = (unsigned long) val; | |
101 | hi = val >> 32; | |
102 | wrmsr (msr, lo, hi); | |
103 | } | |
104 | ||
105 | /* wrmsr with exception handling */ | |
90a0a06a RR |
106 | #define wrmsr_safe(msr,val1,val2) \ |
107 | (native_write_msr_safe(msr, ((unsigned long long)val2 << 32) | val1)) | |
1da177e4 | 108 | |
f2ab4461 | 109 | /* rdmsr with exception handling */ |
90a0a06a RR |
110 | #define rdmsr_safe(msr,p1,p2) \ |
111 | ({ \ | |
112 | int __err; \ | |
113 | unsigned long long __val = native_read_msr_safe(msr, &__err);\ | |
114 | (*p1) = __val; \ | |
115 | (*p2) = __val >> 32; \ | |
116 | __err; \ | |
117 | }) | |
118 | ||
119 | #define rdtsc(low,high) \ | |
120 | do { \ | |
121 | u64 _l = native_read_tsc(); \ | |
122 | (low) = (u32)_l; \ | |
123 | (high) = _l >> 32; \ | |
124 | } while(0) | |
125 | ||
126 | #define rdtscl(low) \ | |
127 | do { \ | |
128 | (low) = native_read_tsc(); \ | |
129 | } while(0) | |
130 | ||
131 | #define rdtscll(val) ((val) = native_read_tsc()) | |
1da177e4 LT |
132 | |
133 | #define write_tsc(val1,val2) wrmsr(0x10, val1, val2) | |
134 | ||
90a0a06a RR |
135 | #define rdpmc(counter,low,high) \ |
136 | do { \ | |
137 | u64 _l = native_read_pmc(); \ | |
138 | low = (u32)_l; \ | |
139 | high = _l >> 32; \ | |
140 | } while(0) | |
d3561b7f | 141 | #endif /* !CONFIG_PARAVIRT */ |
1da177e4 | 142 | |
b44755cf | 143 | #ifdef CONFIG_SMP |
b077ffb3 AD |
144 | void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); |
145 | void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); | |
b44755cf AB |
146 | #else /* CONFIG_SMP */ |
147 | static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h) | |
148 | { | |
149 | rdmsr(msr_no, *l, *h); | |
150 | } | |
151 | static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h) | |
152 | { | |
153 | wrmsr(msr_no, l, h); | |
154 | } | |
155 | #endif /* CONFIG_SMP */ | |
b077ffb3 | 156 | |
1da177e4 LT |
157 | /* symbolic names for some interesting MSRs */ |
158 | /* Intel defined MSRs. */ | |
159 | #define MSR_IA32_P5_MC_ADDR 0 | |
160 | #define MSR_IA32_P5_MC_TYPE 1 | |
161 | #define MSR_IA32_PLATFORM_ID 0x17 | |
162 | #define MSR_IA32_EBL_CR_POWERON 0x2a | |
163 | ||
164 | #define MSR_IA32_APICBASE 0x1b | |
165 | #define MSR_IA32_APICBASE_BSP (1<<8) | |
166 | #define MSR_IA32_APICBASE_ENABLE (1<<11) | |
167 | #define MSR_IA32_APICBASE_BASE (0xfffff<<12) | |
168 | ||
169 | #define MSR_IA32_UCODE_WRITE 0x79 | |
170 | #define MSR_IA32_UCODE_REV 0x8b | |
171 | ||
172 | #define MSR_P6_PERFCTR0 0xc1 | |
173 | #define MSR_P6_PERFCTR1 0xc2 | |
4e74663c DB |
174 | #define MSR_FSB_FREQ 0xcd |
175 | ||
1da177e4 LT |
176 | |
177 | #define MSR_IA32_BBL_CR_CTL 0x119 | |
178 | ||
179 | #define MSR_IA32_SYSENTER_CS 0x174 | |
180 | #define MSR_IA32_SYSENTER_ESP 0x175 | |
181 | #define MSR_IA32_SYSENTER_EIP 0x176 | |
182 | ||
183 | #define MSR_IA32_MCG_CAP 0x179 | |
184 | #define MSR_IA32_MCG_STATUS 0x17a | |
185 | #define MSR_IA32_MCG_CTL 0x17b | |
186 | ||
187 | /* P4/Xeon+ specific */ | |
188 | #define MSR_IA32_MCG_EAX 0x180 | |
189 | #define MSR_IA32_MCG_EBX 0x181 | |
190 | #define MSR_IA32_MCG_ECX 0x182 | |
191 | #define MSR_IA32_MCG_EDX 0x183 | |
192 | #define MSR_IA32_MCG_ESI 0x184 | |
193 | #define MSR_IA32_MCG_EDI 0x185 | |
194 | #define MSR_IA32_MCG_EBP 0x186 | |
195 | #define MSR_IA32_MCG_ESP 0x187 | |
196 | #define MSR_IA32_MCG_EFLAGS 0x188 | |
197 | #define MSR_IA32_MCG_EIP 0x189 | |
198 | #define MSR_IA32_MCG_RESERVED 0x18A | |
199 | ||
200 | #define MSR_P6_EVNTSEL0 0x186 | |
201 | #define MSR_P6_EVNTSEL1 0x187 | |
202 | ||
203 | #define MSR_IA32_PERF_STATUS 0x198 | |
204 | #define MSR_IA32_PERF_CTL 0x199 | |
205 | ||
dfde5d62 VP |
206 | #define MSR_IA32_MPERF 0xE7 |
207 | #define MSR_IA32_APERF 0xE8 | |
208 | ||
1da177e4 LT |
209 | #define MSR_IA32_THERM_CONTROL 0x19a |
210 | #define MSR_IA32_THERM_INTERRUPT 0x19b | |
211 | #define MSR_IA32_THERM_STATUS 0x19c | |
212 | #define MSR_IA32_MISC_ENABLE 0x1a0 | |
213 | ||
214 | #define MSR_IA32_DEBUGCTLMSR 0x1d9 | |
215 | #define MSR_IA32_LASTBRANCHFROMIP 0x1db | |
216 | #define MSR_IA32_LASTBRANCHTOIP 0x1dc | |
217 | #define MSR_IA32_LASTINTFROMIP 0x1dd | |
218 | #define MSR_IA32_LASTINTTOIP 0x1de | |
219 | ||
220 | #define MSR_IA32_MC0_CTL 0x400 | |
221 | #define MSR_IA32_MC0_STATUS 0x401 | |
222 | #define MSR_IA32_MC0_ADDR 0x402 | |
223 | #define MSR_IA32_MC0_MISC 0x403 | |
224 | ||
bb0d977e SE |
225 | #define MSR_IA32_PEBS_ENABLE 0x3f1 |
226 | #define MSR_IA32_DS_AREA 0x600 | |
227 | #define MSR_IA32_PERF_CAPABILITIES 0x345 | |
228 | ||
1da177e4 LT |
229 | /* Pentium IV performance counter MSRs */ |
230 | #define MSR_P4_BPU_PERFCTR0 0x300 | |
231 | #define MSR_P4_BPU_PERFCTR1 0x301 | |
232 | #define MSR_P4_BPU_PERFCTR2 0x302 | |
233 | #define MSR_P4_BPU_PERFCTR3 0x303 | |
234 | #define MSR_P4_MS_PERFCTR0 0x304 | |
235 | #define MSR_P4_MS_PERFCTR1 0x305 | |
236 | #define MSR_P4_MS_PERFCTR2 0x306 | |
237 | #define MSR_P4_MS_PERFCTR3 0x307 | |
238 | #define MSR_P4_FLAME_PERFCTR0 0x308 | |
239 | #define MSR_P4_FLAME_PERFCTR1 0x309 | |
240 | #define MSR_P4_FLAME_PERFCTR2 0x30a | |
241 | #define MSR_P4_FLAME_PERFCTR3 0x30b | |
242 | #define MSR_P4_IQ_PERFCTR0 0x30c | |
243 | #define MSR_P4_IQ_PERFCTR1 0x30d | |
244 | #define MSR_P4_IQ_PERFCTR2 0x30e | |
245 | #define MSR_P4_IQ_PERFCTR3 0x30f | |
246 | #define MSR_P4_IQ_PERFCTR4 0x310 | |
247 | #define MSR_P4_IQ_PERFCTR5 0x311 | |
248 | #define MSR_P4_BPU_CCCR0 0x360 | |
249 | #define MSR_P4_BPU_CCCR1 0x361 | |
250 | #define MSR_P4_BPU_CCCR2 0x362 | |
251 | #define MSR_P4_BPU_CCCR3 0x363 | |
252 | #define MSR_P4_MS_CCCR0 0x364 | |
253 | #define MSR_P4_MS_CCCR1 0x365 | |
254 | #define MSR_P4_MS_CCCR2 0x366 | |
255 | #define MSR_P4_MS_CCCR3 0x367 | |
256 | #define MSR_P4_FLAME_CCCR0 0x368 | |
257 | #define MSR_P4_FLAME_CCCR1 0x369 | |
258 | #define MSR_P4_FLAME_CCCR2 0x36a | |
259 | #define MSR_P4_FLAME_CCCR3 0x36b | |
260 | #define MSR_P4_IQ_CCCR0 0x36c | |
261 | #define MSR_P4_IQ_CCCR1 0x36d | |
262 | #define MSR_P4_IQ_CCCR2 0x36e | |
263 | #define MSR_P4_IQ_CCCR3 0x36f | |
264 | #define MSR_P4_IQ_CCCR4 0x370 | |
265 | #define MSR_P4_IQ_CCCR5 0x371 | |
266 | #define MSR_P4_ALF_ESCR0 0x3ca | |
267 | #define MSR_P4_ALF_ESCR1 0x3cb | |
268 | #define MSR_P4_BPU_ESCR0 0x3b2 | |
269 | #define MSR_P4_BPU_ESCR1 0x3b3 | |
270 | #define MSR_P4_BSU_ESCR0 0x3a0 | |
271 | #define MSR_P4_BSU_ESCR1 0x3a1 | |
272 | #define MSR_P4_CRU_ESCR0 0x3b8 | |
273 | #define MSR_P4_CRU_ESCR1 0x3b9 | |
274 | #define MSR_P4_CRU_ESCR2 0x3cc | |
275 | #define MSR_P4_CRU_ESCR3 0x3cd | |
276 | #define MSR_P4_CRU_ESCR4 0x3e0 | |
277 | #define MSR_P4_CRU_ESCR5 0x3e1 | |
278 | #define MSR_P4_DAC_ESCR0 0x3a8 | |
279 | #define MSR_P4_DAC_ESCR1 0x3a9 | |
280 | #define MSR_P4_FIRM_ESCR0 0x3a4 | |
281 | #define MSR_P4_FIRM_ESCR1 0x3a5 | |
282 | #define MSR_P4_FLAME_ESCR0 0x3a6 | |
283 | #define MSR_P4_FLAME_ESCR1 0x3a7 | |
284 | #define MSR_P4_FSB_ESCR0 0x3a2 | |
285 | #define MSR_P4_FSB_ESCR1 0x3a3 | |
286 | #define MSR_P4_IQ_ESCR0 0x3ba | |
287 | #define MSR_P4_IQ_ESCR1 0x3bb | |
288 | #define MSR_P4_IS_ESCR0 0x3b4 | |
289 | #define MSR_P4_IS_ESCR1 0x3b5 | |
290 | #define MSR_P4_ITLB_ESCR0 0x3b6 | |
291 | #define MSR_P4_ITLB_ESCR1 0x3b7 | |
292 | #define MSR_P4_IX_ESCR0 0x3c8 | |
293 | #define MSR_P4_IX_ESCR1 0x3c9 | |
294 | #define MSR_P4_MOB_ESCR0 0x3aa | |
295 | #define MSR_P4_MOB_ESCR1 0x3ab | |
296 | #define MSR_P4_MS_ESCR0 0x3c0 | |
297 | #define MSR_P4_MS_ESCR1 0x3c1 | |
298 | #define MSR_P4_PMH_ESCR0 0x3ac | |
299 | #define MSR_P4_PMH_ESCR1 0x3ad | |
300 | #define MSR_P4_RAT_ESCR0 0x3bc | |
301 | #define MSR_P4_RAT_ESCR1 0x3bd | |
302 | #define MSR_P4_SAAT_ESCR0 0x3ae | |
303 | #define MSR_P4_SAAT_ESCR1 0x3af | |
304 | #define MSR_P4_SSU_ESCR0 0x3be | |
305 | #define MSR_P4_SSU_ESCR1 0x3bf /* guess: not defined in manual */ | |
306 | #define MSR_P4_TBPU_ESCR0 0x3c2 | |
307 | #define MSR_P4_TBPU_ESCR1 0x3c3 | |
308 | #define MSR_P4_TC_ESCR0 0x3c4 | |
309 | #define MSR_P4_TC_ESCR1 0x3c5 | |
310 | #define MSR_P4_U2L_ESCR0 0x3b0 | |
311 | #define MSR_P4_U2L_ESCR1 0x3b1 | |
312 | ||
313 | /* AMD Defined MSRs */ | |
314 | #define MSR_K6_EFER 0xC0000080 | |
315 | #define MSR_K6_STAR 0xC0000081 | |
316 | #define MSR_K6_WHCR 0xC0000082 | |
317 | #define MSR_K6_UWCCR 0xC0000085 | |
318 | #define MSR_K6_EPMR 0xC0000086 | |
319 | #define MSR_K6_PSOR 0xC0000087 | |
320 | #define MSR_K6_PFIR 0xC0000088 | |
321 | ||
322 | #define MSR_K7_EVNTSEL0 0xC0010000 | |
323 | #define MSR_K7_EVNTSEL1 0xC0010001 | |
324 | #define MSR_K7_EVNTSEL2 0xC0010002 | |
325 | #define MSR_K7_EVNTSEL3 0xC0010003 | |
326 | #define MSR_K7_PERFCTR0 0xC0010004 | |
327 | #define MSR_K7_PERFCTR1 0xC0010005 | |
328 | #define MSR_K7_PERFCTR2 0xC0010006 | |
329 | #define MSR_K7_PERFCTR3 0xC0010007 | |
330 | #define MSR_K7_HWCR 0xC0010015 | |
331 | #define MSR_K7_CLK_CTL 0xC001001b | |
332 | #define MSR_K7_FID_VID_CTL 0xC0010041 | |
333 | #define MSR_K7_FID_VID_STATUS 0xC0010042 | |
334 | ||
3556ddfa AK |
335 | #define MSR_K8_ENABLE_C1E 0xC0010055 |
336 | ||
1da177e4 LT |
337 | /* extended feature register */ |
338 | #define MSR_EFER 0xc0000080 | |
339 | ||
340 | /* EFER bits: */ | |
341 | ||
342 | /* Execute Disable enable */ | |
343 | #define _EFER_NX 11 | |
344 | #define EFER_NX (1<<_EFER_NX) | |
345 | ||
346 | /* Centaur-Hauls/IDT defined MSRs. */ | |
347 | #define MSR_IDT_FCR1 0x107 | |
348 | #define MSR_IDT_FCR2 0x108 | |
349 | #define MSR_IDT_FCR3 0x109 | |
350 | #define MSR_IDT_FCR4 0x10a | |
351 | ||
352 | #define MSR_IDT_MCR0 0x110 | |
353 | #define MSR_IDT_MCR1 0x111 | |
354 | #define MSR_IDT_MCR2 0x112 | |
355 | #define MSR_IDT_MCR3 0x113 | |
356 | #define MSR_IDT_MCR4 0x114 | |
357 | #define MSR_IDT_MCR5 0x115 | |
358 | #define MSR_IDT_MCR6 0x116 | |
359 | #define MSR_IDT_MCR7 0x117 | |
360 | #define MSR_IDT_MCR_CTRL 0x120 | |
361 | ||
362 | /* VIA Cyrix defined MSRs */ | |
363 | #define MSR_VIA_FCR 0x1107 | |
364 | #define MSR_VIA_LONGHAUL 0x110a | |
365 | #define MSR_VIA_RNG 0x110b | |
366 | #define MSR_VIA_BCR2 0x1147 | |
367 | ||
368 | /* Transmeta defined MSRs */ | |
369 | #define MSR_TMTA_LONGRUN_CTRL 0x80868010 | |
370 | #define MSR_TMTA_LONGRUN_FLAGS 0x80868011 | |
371 | #define MSR_TMTA_LRTI_READOUT 0x80868018 | |
372 | #define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a | |
373 | ||
bb0d977e SE |
374 | /* Intel Core-based CPU performance counters */ |
375 | #define MSR_CORE_PERF_FIXED_CTR0 0x309 | |
376 | #define MSR_CORE_PERF_FIXED_CTR1 0x30a | |
377 | #define MSR_CORE_PERF_FIXED_CTR2 0x30b | |
378 | #define MSR_CORE_PERF_FIXED_CTR_CTRL 0x38d | |
379 | #define MSR_CORE_PERF_GLOBAL_STATUS 0x38e | |
380 | #define MSR_CORE_PERF_GLOBAL_CTRL 0x38f | |
381 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL 0x390 | |
382 | ||
07190a08 MT |
383 | /* Geode defined MSRs */ |
384 | #define MSR_GEODE_BUSCONT_CONF0 0x1900 | |
385 | ||
1da177e4 | 386 | #endif /* __ASM_MSR_H */ |