Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | #ifndef X86_64_MSR_H |
2 | #define X86_64_MSR_H 1 | |
3 | ||
4 | #ifndef __ASSEMBLY__ | |
5 | /* | |
6 | * Access to machine-specific registers (available on 586 and better only) | |
7 | * Note: the rd* operations modify the parameters directly (without using | |
8 | * pointer indirection), this allows gcc to optimize better | |
9 | */ | |
10 | ||
/*
 * rdmsr(): read the MSR selected by `msr` (placed in ECX) — the low 32 bits
 * (EAX) land in val1 and the high 32 bits (EDX) in val2.  The results are
 * written straight into the caller's lvalues, no pointer indirection.
 */
#define rdmsr(msr,val1,val2) \
       __asm__ __volatile__("rdmsr" \
			    : "=a" (val1), "=d" (val2) \
			    : "c" (msr))
15 | ||
16 | ||
/*
 * rdmsrl(): read a full 64-bit MSR value into a single unsigned long lvalue.
 * RDMSR returns the value split across EDX:EAX; the two halves are
 * recombined here (unsigned long is 64-bit on x86-64, so the shift is safe).
 *
 * Fix: no trailing semicolon after while(0) — the caller supplies it, and a
 * stray one made `if (x) rdmsrl(m, v); else ...` a syntax error.
 */
#define rdmsrl(msr,val) do { unsigned long a__,b__; \
       __asm__ __volatile__("rdmsr" \
			    : "=a" (a__), "=d" (b__) \
			    : "c" (msr)); \
       val = a__ | (b__<<32); \
} while(0)
23 | ||
/*
 * wrmsr(): write the 64-bit value val2:val1 (EDX:EAX) to the MSR selected
 * by `msr` (ECX).  No outputs; the asm is volatile because the write is a
 * side effect the compiler must not elide.
 */
#define wrmsr(msr,val1,val2) \
     __asm__ __volatile__("wrmsr" \
			  : /* no outputs */ \
			  : "c" (msr), "a" (val1), "d" (val2))
28 | ||
/* wrmsrl(): write a 64-bit value to an MSR — low half in EAX, high in EDX. */
#define wrmsrl(msr,val) wrmsr(msr, (__u32)((__u64)(val)), ((__u64)(val)) >> 32)
30 | ||
/* wrmsr with exception handling: evaluates to 0 on success or -EFAULT if
 * the WRMSR faulted (e.g. a non-existent MSR).  The __ex_table entry routes
 * a fault at label 2: to the fixup code at 3:, which loads -EFAULT into the
 * result register and resumes after the write at 1:.
 * Fix: use __asm__ __volatile__ instead of the bare `asm volatile`
 * extension keyword, matching the rest of this file and remaining valid
 * under strict ISO C modes (-std=c11) where `asm` is not a keyword. */
#define wrmsr_safe(msr,a,b) ({ int ret__;			\
	__asm__ __volatile__("2: wrmsr ; xorl %0,%0\n"		\
		     "1:\n\t"					\
		     ".section .fixup,\"ax\"\n\t"		\
		     "3: movl %4,%0 ; jmp 1b\n\t"		\
		     ".previous\n\t"				\
		     ".section __ex_table,\"a\"\n"		\
		     " .align 8\n\t"				\
		     " .quad 2b,3b\n\t"				\
		     ".previous"				\
		     : "=a" (ret__)				\
		     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT));\
	ret__; })
45 | ||
/* Like wrmsrl(), but via wrmsr_safe(): yields 0 or -EFAULT instead of faulting. */
#define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))
47 | ||
/* Read the time stamp counter: low 32 bits into `low`, high 32 into `high`. */
#define rdtsc(low,high) \
     __asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))
50 | ||
/* Read only the low 32 bits of the TSC; EDX (clobbered by RDTSC) is discarded. */
#define rdtscl(low) \
     __asm__ __volatile__ ("rdtsc" : "=a" (low) : : "edx")
53 | ||
/* rdtscll(): read the full 64-bit TSC into `val` (an unsigned long lvalue).
 * Fix: __asm__ __volatile__ instead of the bare `asm volatile` keyword,
 * for consistency with this file and validity under strict ISO C modes. */
#define rdtscll(val) do { \
     unsigned int __a,__d; \
     __asm__ __volatile__("rdtsc" : "=a" (__a), "=d" (__d)); \
     (val) = ((unsigned long)__a) | (((unsigned long)__d)<<32); \
} while(0)
59 | ||
1da177e4 LT |
/* Write the time stamp counter.  Uses the named MSR_IA32_TSC constant
 * (defined in this header) instead of the magic number 0x10. */
#define write_tsc(val1,val2) wrmsr(MSR_IA32_TSC, val1, val2)
61 | ||
/* Read performance-monitoring counter `counter` (ECX) into EDX:EAX. */
#define rdpmc(counter,low,high) \
     __asm__ __volatile__("rdpmc" \
			  : "=a" (low), "=d" (high) \
			  : "c" (counter))
66 | ||
/*
 * cpuid(): execute CPUID for leaf `op` and store the four result registers
 * through the given pointers.
 *
 * Fix: static inline rather than the legacy GNU "extern inline" — under
 * C99 inline semantics "extern inline" makes every including translation
 * unit emit an external definition, causing duplicate-symbol link errors.
 * This also matches cpuid_count() below.
 */
static inline void cpuid(int op, unsigned int *eax, unsigned int *ebx,
			 unsigned int *ecx, unsigned int *edx)
{
	__asm__("cpuid"
		: "=a" (*eax),
		  "=b" (*ebx),
		  "=c" (*ecx),
		  "=d" (*edx)
		: "0" (op));
}
77 | ||
/* Some CPUID calls want 'count' to be placed in ecx */
static inline void cpuid_count(int op, int count, int *eax, int *ebx, int *ecx,
		int *edx)
{
	/* Leaf in EAX, sub-leaf in ECX; results come back in EAX..EDX. */
	__asm__("cpuid"
		: "=a" (*eax), "=b" (*ebx), "=c" (*ecx), "=d" (*edx)
		: "0" (op), "c" (count));
}
89 | ||
/*
 * CPUID functions returning a single datum
 */

/* Return EAX of CPUID leaf `op`; EBX/ECX/EDX are declared clobbered.
 * Fix: static inline instead of GNU "extern inline" (C99 inline semantics
 * would emit an external definition per translation unit). */
static inline unsigned int cpuid_eax(unsigned int op)
{
	unsigned int eax;

	__asm__("cpuid"
		: "=a" (eax)
		: "0" (op)
		: "bx", "cx", "dx");
	return eax;
}
/* Return EBX of CPUID leaf `op`; ECX/EDX are declared clobbered.
 * Fix: static inline instead of GNU "extern inline" (see cpuid_eax). */
static inline unsigned int cpuid_ebx(unsigned int op)
{
	unsigned int eax, ebx;

	__asm__("cpuid"
		: "=a" (eax), "=b" (ebx)
		: "0" (op)
		: "cx", "dx" );
	return ebx;
}
/* Return ECX of CPUID leaf `op`; EBX/EDX are declared clobbered.
 * Fix: static inline instead of GNU "extern inline" (see cpuid_eax). */
static inline unsigned int cpuid_ecx(unsigned int op)
{
	unsigned int eax, ecx;

	__asm__("cpuid"
		: "=a" (eax), "=c" (ecx)
		: "0" (op)
		: "bx", "dx" );
	return ecx;
}
/* Return EDX of CPUID leaf `op`; EBX/ECX are declared clobbered.
 * Fix: static inline instead of GNU "extern inline" (see cpuid_eax). */
static inline unsigned int cpuid_edx(unsigned int op)
{
	unsigned int eax, edx;

	__asm__("cpuid"
		: "=a" (eax), "=d" (edx)
		: "0" (op)
		: "bx", "cx");
	return edx;
}
133 | ||
/* IA-32 microcode update MSRs: update trigger and loaded revision. */
#define MSR_IA32_UCODE_WRITE 0x79
#define MSR_IA32_UCODE_REV 0x8b
136 | ||
137 | ||
138 | #endif | |
139 | ||
/* AMD/K8 specific MSRs */
#define MSR_EFER 0xc0000080		/* extended feature register */
#define MSR_STAR 0xc0000081		/* legacy mode SYSCALL target */
#define MSR_LSTAR 0xc0000082		/* long mode SYSCALL target */
#define MSR_CSTAR 0xc0000083		/* compatibility mode SYSCALL target */
#define MSR_SYSCALL_MASK 0xc0000084	/* EFLAGS mask for syscall */
#define MSR_FS_BASE 0xc0000100		/* 64bit FS base */
#define MSR_GS_BASE 0xc0000101		/* 64bit GS base */
#define MSR_KERNEL_GS_BASE 0xc0000102	/* SwapGS GS shadow (or USER_GS from kernel) */
/* EFER bits: */
#define _EFER_SCE 0	/* SYSCALL/SYSRET */
#define _EFER_LME 8	/* Long mode enable */
#define _EFER_LMA 10	/* Long mode active (read-only) */
#define _EFER_NX 11	/* No execute enable */

#define EFER_SCE (1<<_EFER_SCE)
#define EFER_LME (1<<_EFER_LME)
#define EFER_LMA (1<<_EFER_LMA)
#define EFER_NX (1<<_EFER_NX)
159 | ||
/* Intel MSRs. Some also available on other CPUs */
#define MSR_IA32_TSC 0x10		/* time stamp counter */
#define MSR_IA32_PLATFORM_ID 0x17

#define MSR_IA32_PERFCTR0 0xc1
#define MSR_IA32_PERFCTR1 0xc2

#define MSR_MTRRcap 0x0fe
#define MSR_IA32_BBL_CR_CTL 0x119

#define MSR_IA32_SYSENTER_CS 0x174
#define MSR_IA32_SYSENTER_ESP 0x175
#define MSR_IA32_SYSENTER_EIP 0x176

/* Machine-check global capability/status/control */
#define MSR_IA32_MCG_CAP 0x179
#define MSR_IA32_MCG_STATUS 0x17a
#define MSR_IA32_MCG_CTL 0x17b

#define MSR_IA32_EVNTSEL0 0x186
#define MSR_IA32_EVNTSEL1 0x187

/* Last-branch / last-interrupt record MSRs */
#define MSR_IA32_DEBUGCTLMSR 0x1d9
#define MSR_IA32_LASTBRANCHFROMIP 0x1db
#define MSR_IA32_LASTBRANCHTOIP 0x1dc
#define MSR_IA32_LASTINTFROMIP 0x1dd
#define MSR_IA32_LASTINTTOIP 0x1de

/* Fixed-range MTRRs */
#define MSR_MTRRfix64K_00000 0x250
#define MSR_MTRRfix16K_80000 0x258
#define MSR_MTRRfix16K_A0000 0x259
#define MSR_MTRRfix4K_C0000 0x268
#define MSR_MTRRfix4K_C8000 0x269
#define MSR_MTRRfix4K_D0000 0x26a
#define MSR_MTRRfix4K_D8000 0x26b
#define MSR_MTRRfix4K_E0000 0x26c
#define MSR_MTRRfix4K_E8000 0x26d
#define MSR_MTRRfix4K_F0000 0x26e
#define MSR_MTRRfix4K_F8000 0x26f
#define MSR_MTRRdefType 0x2ff

/* Machine-check bank 0 */
#define MSR_IA32_MC0_CTL 0x400
#define MSR_IA32_MC0_STATUS 0x401
#define MSR_IA32_MC0_ADDR 0x402
#define MSR_IA32_MC0_MISC 0x403

/* P6 names for the same counter/event-select addresses defined above */
#define MSR_P6_PERFCTR0 0xc1
#define MSR_P6_PERFCTR1 0xc2
#define MSR_P6_EVNTSEL0 0x186
#define MSR_P6_EVNTSEL1 0x187
209 | ||
/* K7/K8 MSRs. Not complete. See the architecture manual for a more complete list. */
#define MSR_K7_EVNTSEL0 0xC0010000
#define MSR_K7_PERFCTR0 0xC0010004
#define MSR_K7_EVNTSEL1 0xC0010001
#define MSR_K7_PERFCTR1 0xC0010005
#define MSR_K7_EVNTSEL2 0xC0010002
#define MSR_K7_PERFCTR2 0xC0010006
#define MSR_K7_EVNTSEL3 0xC0010003
#define MSR_K7_PERFCTR3 0xC0010007
#define MSR_K8_TOP_MEM1 0xC001001A
#define MSR_K8_TOP_MEM2 0xC001001D
/* Fixed: SYSCFG lives in the AMD-specific C001_xxxx range (C001_0010 per
 * the AMD64 APM Vol. 2), not C000_0010 as previously defined. */
#define MSR_K8_SYSCFG 0xC0010010
222 | ||
/* K6 MSRs */
#define MSR_K6_EFER 0xC0000080	/* same address as MSR_EFER above */
#define MSR_K6_STAR 0xC0000081	/* same address as MSR_STAR above */
#define MSR_K6_WHCR 0xC0000082
#define MSR_K6_UWCCR 0xC0000085
#define MSR_K6_PSOR 0xC0000087
#define MSR_K6_PFIR 0xC0000088
230 | ||
/* Centaur-Hauls/IDT defined MSRs: feature control registers ... */
#define MSR_IDT_FCR1 0x107
#define MSR_IDT_FCR2 0x108
#define MSR_IDT_FCR3 0x109
#define MSR_IDT_FCR4 0x10a

/* ... and memory configuration registers. */
#define MSR_IDT_MCR0 0x110
#define MSR_IDT_MCR1 0x111
#define MSR_IDT_MCR2 0x112
#define MSR_IDT_MCR3 0x113
#define MSR_IDT_MCR4 0x114
#define MSR_IDT_MCR5 0x115
#define MSR_IDT_MCR6 0x116
#define MSR_IDT_MCR7 0x117
#define MSR_IDT_MCR_CTRL 0x120

/* VIA Cyrix defined MSRs */
#define MSR_VIA_FCR 0x1107
#define MSR_VIA_LONGHAUL 0x110a
#define MSR_VIA_RNG 0x110b
#define MSR_VIA_BCR2 0x1147
252 | ||
/* Intel defined MSRs. */
/* NOTE(review): PLATFORM_ID, DEBUGCTLMSR, the LASTBRANCH/LASTINT group,
 * the MC0 group and the P6_EVNTSEL pair are redefined below with the same
 * values as earlier in this file; the duplicates are benign (identical
 * redefinitions) but could be consolidated. */
#define MSR_IA32_P5_MC_ADDR 0
#define MSR_IA32_P5_MC_TYPE 1
#define MSR_IA32_PLATFORM_ID 0x17
#define MSR_IA32_EBL_CR_POWERON 0x2a

/* Local APIC base address MSR and its flag/field masks */
#define MSR_IA32_APICBASE 0x1b
#define MSR_IA32_APICBASE_BSP (1<<8)
#define MSR_IA32_APICBASE_ENABLE (1<<11)
#define MSR_IA32_APICBASE_BASE (0xfffff<<12)

/* P4/Xeon+ specific */
#define MSR_IA32_MCG_EAX 0x180
#define MSR_IA32_MCG_EBX 0x181
#define MSR_IA32_MCG_ECX 0x182
#define MSR_IA32_MCG_EDX 0x183
#define MSR_IA32_MCG_ESI 0x184
#define MSR_IA32_MCG_EDI 0x185
#define MSR_IA32_MCG_EBP 0x186
#define MSR_IA32_MCG_ESP 0x187
#define MSR_IA32_MCG_EFLAGS 0x188
#define MSR_IA32_MCG_EIP 0x189
#define MSR_IA32_MCG_RESERVED 0x18A

#define MSR_P6_EVNTSEL0 0x186
#define MSR_P6_EVNTSEL1 0x187

#define MSR_IA32_PERF_STATUS 0x198
#define MSR_IA32_PERF_CTL 0x199

#define MSR_IA32_THERM_CONTROL 0x19a
#define MSR_IA32_THERM_INTERRUPT 0x19b
#define MSR_IA32_THERM_STATUS 0x19c
#define MSR_IA32_MISC_ENABLE 0x1a0

#define MSR_IA32_DEBUGCTLMSR 0x1d9
#define MSR_IA32_LASTBRANCHFROMIP 0x1db
#define MSR_IA32_LASTBRANCHTOIP 0x1dc
#define MSR_IA32_LASTINTFROMIP 0x1dd
#define MSR_IA32_LASTINTTOIP 0x1de

#define MSR_IA32_MC0_CTL 0x400
#define MSR_IA32_MC0_STATUS 0x401
#define MSR_IA32_MC0_ADDR 0x402
#define MSR_IA32_MC0_MISC 0x403
298 | ||
/* Pentium IV performance counter MSRs: counters (0x300-), CCCRs (0x360-)
 * and ESCRs (0x3a0-).  The ESCR list is ordered by functional unit name,
 * not numerically. */
#define MSR_P4_BPU_PERFCTR0 0x300
#define MSR_P4_BPU_PERFCTR1 0x301
#define MSR_P4_BPU_PERFCTR2 0x302
#define MSR_P4_BPU_PERFCTR3 0x303
#define MSR_P4_MS_PERFCTR0 0x304
#define MSR_P4_MS_PERFCTR1 0x305
#define MSR_P4_MS_PERFCTR2 0x306
#define MSR_P4_MS_PERFCTR3 0x307
#define MSR_P4_FLAME_PERFCTR0 0x308
#define MSR_P4_FLAME_PERFCTR1 0x309
#define MSR_P4_FLAME_PERFCTR2 0x30a
#define MSR_P4_FLAME_PERFCTR3 0x30b
#define MSR_P4_IQ_PERFCTR0 0x30c
#define MSR_P4_IQ_PERFCTR1 0x30d
#define MSR_P4_IQ_PERFCTR2 0x30e
#define MSR_P4_IQ_PERFCTR3 0x30f
#define MSR_P4_IQ_PERFCTR4 0x310
#define MSR_P4_IQ_PERFCTR5 0x311
#define MSR_P4_BPU_CCCR0 0x360
#define MSR_P4_BPU_CCCR1 0x361
#define MSR_P4_BPU_CCCR2 0x362
#define MSR_P4_BPU_CCCR3 0x363
#define MSR_P4_MS_CCCR0 0x364
#define MSR_P4_MS_CCCR1 0x365
#define MSR_P4_MS_CCCR2 0x366
#define MSR_P4_MS_CCCR3 0x367
#define MSR_P4_FLAME_CCCR0 0x368
#define MSR_P4_FLAME_CCCR1 0x369
#define MSR_P4_FLAME_CCCR2 0x36a
#define MSR_P4_FLAME_CCCR3 0x36b
#define MSR_P4_IQ_CCCR0 0x36c
#define MSR_P4_IQ_CCCR1 0x36d
#define MSR_P4_IQ_CCCR2 0x36e
#define MSR_P4_IQ_CCCR3 0x36f
#define MSR_P4_IQ_CCCR4 0x370
#define MSR_P4_IQ_CCCR5 0x371
#define MSR_P4_ALF_ESCR0 0x3ca
#define MSR_P4_ALF_ESCR1 0x3cb
#define MSR_P4_BPU_ESCR0 0x3b2
#define MSR_P4_BPU_ESCR1 0x3b3
#define MSR_P4_BSU_ESCR0 0x3a0
#define MSR_P4_BSU_ESCR1 0x3a1
#define MSR_P4_CRU_ESCR0 0x3b8
#define MSR_P4_CRU_ESCR1 0x3b9
#define MSR_P4_CRU_ESCR2 0x3cc
#define MSR_P4_CRU_ESCR3 0x3cd
#define MSR_P4_CRU_ESCR4 0x3e0
#define MSR_P4_CRU_ESCR5 0x3e1
#define MSR_P4_DAC_ESCR0 0x3a8
#define MSR_P4_DAC_ESCR1 0x3a9
#define MSR_P4_FIRM_ESCR0 0x3a4
#define MSR_P4_FIRM_ESCR1 0x3a5
#define MSR_P4_FLAME_ESCR0 0x3a6
#define MSR_P4_FLAME_ESCR1 0x3a7
#define MSR_P4_FSB_ESCR0 0x3a2
#define MSR_P4_FSB_ESCR1 0x3a3
#define MSR_P4_IQ_ESCR0 0x3ba
#define MSR_P4_IQ_ESCR1 0x3bb
#define MSR_P4_IS_ESCR0 0x3b4
#define MSR_P4_IS_ESCR1 0x3b5
#define MSR_P4_ITLB_ESCR0 0x3b6
#define MSR_P4_ITLB_ESCR1 0x3b7
#define MSR_P4_IX_ESCR0 0x3c8
#define MSR_P4_IX_ESCR1 0x3c9
#define MSR_P4_MOB_ESCR0 0x3aa
#define MSR_P4_MOB_ESCR1 0x3ab
#define MSR_P4_MS_ESCR0 0x3c0
#define MSR_P4_MS_ESCR1 0x3c1
#define MSR_P4_PMH_ESCR0 0x3ac
#define MSR_P4_PMH_ESCR1 0x3ad
#define MSR_P4_RAT_ESCR0 0x3bc
#define MSR_P4_RAT_ESCR1 0x3bd
#define MSR_P4_SAAT_ESCR0 0x3ae
#define MSR_P4_SAAT_ESCR1 0x3af
#define MSR_P4_SSU_ESCR0 0x3be
#define MSR_P4_SSU_ESCR1 0x3bf /* guess: not defined in manual */
#define MSR_P4_TBPU_ESCR0 0x3c2
#define MSR_P4_TBPU_ESCR1 0x3c3
#define MSR_P4_TC_ESCR0 0x3c4
#define MSR_P4_TC_ESCR1 0x3c5
#define MSR_P4_U2L_ESCR0 0x3b0
#define MSR_P4_U2L_ESCR1 0x3b1
382 | ||
383 | #endif |