/*
 * tools/testing/selftests/kvm/include/x86_64/processor.h
 *
 * Copyright (C) 2018, Google LLC.
 *
 * This work is licensed under the terms of the GNU GPL, version 2.
 *
 */

#ifndef SELFTEST_KVM_PROCESSOR_H
#define SELFTEST_KVM_PROCESSOR_H

#include <assert.h>
#include <stdint.h>

#define X86_EFLAGS_FIXED (1u << 1)

#define X86_CR4_VME (1ul << 0)
#define X86_CR4_PVI (1ul << 1)
#define X86_CR4_TSD (1ul << 2)
#define X86_CR4_DE (1ul << 3)
#define X86_CR4_PSE (1ul << 4)
#define X86_CR4_PAE (1ul << 5)
#define X86_CR4_MCE (1ul << 6)
#define X86_CR4_PGE (1ul << 7)
#define X86_CR4_PCE (1ul << 8)
#define X86_CR4_OSFXSR (1ul << 9)
#define X86_CR4_OSXMMEXCPT (1ul << 10)
#define X86_CR4_UMIP (1ul << 11)
#define X86_CR4_VMXE (1ul << 13)
#define X86_CR4_SMXE (1ul << 14)
#define X86_CR4_FSGSBASE (1ul << 16)
#define X86_CR4_PCIDE (1ul << 17)
#define X86_CR4_OSXSAVE (1ul << 18)
#define X86_CR4_SMEP (1ul << 20)
#define X86_CR4_SMAP (1ul << 21)
#define X86_CR4_PKE (1ul << 22)

/* The enum values match the instruction encoding of each register */
enum x86_register {
        RAX = 0,
        RCX,
        RDX,
        RBX,
        RSP,
        RBP,
        RSI,
        RDI,
        R8,
        R9,
        R10,
        R11,
        R12,
        R13,
        R14,
        R15,
};

struct desc64 {
        uint16_t limit0;
        uint16_t base0;
        unsigned base1:8, s:1, type:4, dpl:2, p:1;
        unsigned limit1:4, avl:1, l:1, db:1, g:1, base2:8;
        uint32_t base3;
        uint32_t zero1;
} __attribute__((packed));

struct desc_ptr {
        uint16_t size;
        uint64_t address;
} __attribute__((packed));

static inline uint64_t get_desc64_base(const struct desc64 *desc)
{
        return ((uint64_t)desc->base3 << 32) |
                (desc->base0 | ((desc->base1) << 16) | ((desc->base2) << 24));
}
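
/*
 * Illustrative sketch (not part of the original header): the inverse of
 * get_desc64_base(), scattering a linear base address back into the split
 * base0/base1/base2/base3 fields of a 64-bit descriptor. The helper name
 * is hypothetical and only meant to show how the fields line up.
 */
static inline void set_desc64_base_example(struct desc64 *desc, uint64_t base)
{
        desc->base0 = base & 0xffff;        /* bits 15:0  */
        desc->base1 = (base >> 16) & 0xff;  /* bits 23:16 */
        desc->base2 = (base >> 24) & 0xff;  /* bits 31:24 */
        desc->base3 = base >> 32;           /* bits 63:32 */
}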

static inline uint64_t rdtsc(void)
{
        uint32_t eax, edx;

        /*
         * The lfence is to wait (on Intel CPUs) until all previous
         * instructions have been executed.
         */
        __asm__ __volatile__("lfence; rdtsc" : "=a"(eax), "=d"(edx));
        return ((uint64_t)edx) << 32 | eax;
}

static inline uint64_t rdtscp(uint32_t *aux)
{
        uint32_t eax, edx;

        __asm__ __volatile__("rdtscp" : "=a"(eax), "=d"(edx), "=c"(*aux));
        return ((uint64_t)edx) << 32 | eax;
}

static inline uint64_t rdmsr(uint32_t msr)
{
        uint32_t a, d;

        __asm__ __volatile__("rdmsr" : "=a"(a), "=d"(d) : "c"(msr) : "memory");

        return a | ((uint64_t) d << 32);
}

static inline void wrmsr(uint32_t msr, uint64_t value)
{
        uint32_t a = value;
        uint32_t d = value >> 32;

        __asm__ __volatile__("wrmsr" :: "a"(a), "d"(d), "c"(msr) : "memory");
}


static inline uint16_t inw(uint16_t port)
{
        uint16_t tmp;

        __asm__ __volatile__("in %%dx, %%ax"
                : /* output */ "=a" (tmp)
                : /* input */ "d" (port));

        return tmp;
}

static inline uint16_t get_es(void)
{
        uint16_t es;

        __asm__ __volatile__("mov %%es, %[es]"
                             : /* output */ [es]"=rm"(es));
        return es;
}

static inline uint16_t get_cs(void)
{
        uint16_t cs;

        __asm__ __volatile__("mov %%cs, %[cs]"
                             : /* output */ [cs]"=rm"(cs));
        return cs;
}

static inline uint16_t get_ss(void)
{
        uint16_t ss;

        __asm__ __volatile__("mov %%ss, %[ss]"
                             : /* output */ [ss]"=rm"(ss));
        return ss;
}

static inline uint16_t get_ds(void)
{
        uint16_t ds;

        __asm__ __volatile__("mov %%ds, %[ds]"
                             : /* output */ [ds]"=rm"(ds));
        return ds;
}

static inline uint16_t get_fs(void)
{
        uint16_t fs;

        __asm__ __volatile__("mov %%fs, %[fs]"
                             : /* output */ [fs]"=rm"(fs));
        return fs;
}

static inline uint16_t get_gs(void)
{
        uint16_t gs;

        __asm__ __volatile__("mov %%gs, %[gs]"
                             : /* output */ [gs]"=rm"(gs));
        return gs;
}

static inline uint16_t get_tr(void)
{
        uint16_t tr;

        __asm__ __volatile__("str %[tr]"
                             : /* output */ [tr]"=rm"(tr));
        return tr;
}

static inline uint64_t get_cr0(void)
{
        uint64_t cr0;

        __asm__ __volatile__("mov %%cr0, %[cr0]"
                             : /* output */ [cr0]"=r"(cr0));
        return cr0;
}

static inline uint64_t get_cr3(void)
{
        uint64_t cr3;

        __asm__ __volatile__("mov %%cr3, %[cr3]"
                             : /* output */ [cr3]"=r"(cr3));
        return cr3;
}

static inline uint64_t get_cr4(void)
{
        uint64_t cr4;

        __asm__ __volatile__("mov %%cr4, %[cr4]"
                             : /* output */ [cr4]"=r"(cr4));
        return cr4;
}

static inline void set_cr4(uint64_t val)
{
        __asm__ __volatile__("mov %0, %%cr4" : : "r" (val) : "memory");
}

static inline uint64_t get_gdt_base(void)
{
        struct desc_ptr gdt;
        __asm__ __volatile__("sgdt %[gdt]"
                             : /* output */ [gdt]"=m"(gdt));
        return gdt.address;
}

static inline uint64_t get_idt_base(void)
{
        struct desc_ptr idt;
        __asm__ __volatile__("sidt %[idt]"
                             : /* output */ [idt]"=m"(idt));
        return idt.address;
}
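
/*
 * Illustrative sketch (not part of the original header): measuring a TSC
 * delta across a region of guest code with the serialized rdtsc() helper
 * above; the lfence keeps earlier instructions from drifting past the
 * first read. The function name and the measured region are hypothetical.
 */
static inline uint64_t tsc_delta_example(void)
{
        uint64_t start, end;

        start = rdtsc();
        /* ... code under measurement would go here ... */
        end = rdtsc();

        return end - start;
}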

#define SET_XMM(__var, __xmm) \
        asm volatile("movq %0, %%"#__xmm : : "r"(__var) : #__xmm)

static inline void set_xmm(int n, unsigned long val)
{
        switch (n) {
        case 0:
                SET_XMM(val, xmm0);
                break;
        case 1:
                SET_XMM(val, xmm1);
                break;
        case 2:
                SET_XMM(val, xmm2);
                break;
        case 3:
                SET_XMM(val, xmm3);
                break;
        case 4:
                SET_XMM(val, xmm4);
                break;
        case 5:
                SET_XMM(val, xmm5);
                break;
        case 6:
                SET_XMM(val, xmm6);
                break;
        case 7:
                SET_XMM(val, xmm7);
                break;
        }
}

typedef unsigned long v1di __attribute__ ((vector_size (8)));
static inline unsigned long get_xmm(int n)
{
        assert(n >= 0 && n <= 7);

        register v1di xmm0 __asm__("%xmm0");
        register v1di xmm1 __asm__("%xmm1");
        register v1di xmm2 __asm__("%xmm2");
        register v1di xmm3 __asm__("%xmm3");
        register v1di xmm4 __asm__("%xmm4");
        register v1di xmm5 __asm__("%xmm5");
        register v1di xmm6 __asm__("%xmm6");
        register v1di xmm7 __asm__("%xmm7");
        switch (n) {
        case 0:
                return (unsigned long)xmm0;
        case 1:
                return (unsigned long)xmm1;
        case 2:
                return (unsigned long)xmm2;
        case 3:
                return (unsigned long)xmm3;
        case 4:
                return (unsigned long)xmm4;
        case 5:
                return (unsigned long)xmm5;
        case 6:
                return (unsigned long)xmm6;
        case 7:
                return (unsigned long)xmm7;
        }
        return 0;
}

struct kvm_x86_state;
struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid);
void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid,
                     struct kvm_x86_state *state);

struct kvm_cpuid2 *kvm_get_supported_cpuid(void);
void vcpu_set_cpuid(struct kvm_vm *vm, uint32_t vcpuid,
                    struct kvm_cpuid2 *cpuid);

struct kvm_cpuid_entry2 *
kvm_get_supported_cpuid_index(uint32_t function, uint32_t index);

static inline struct kvm_cpuid_entry2 *
kvm_get_supported_cpuid_entry(uint32_t function)
{
        return kvm_get_supported_cpuid_index(function, 0);
}

uint64_t vcpu_get_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index);
void vcpu_set_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index,
        uint64_t msr_value);

/*
 * Basic CPU control in CR0
 */
#define X86_CR0_PE (1UL<<0) /* Protection Enable */
#define X86_CR0_MP (1UL<<1) /* Monitor Coprocessor */
#define X86_CR0_EM (1UL<<2) /* Emulation */
#define X86_CR0_TS (1UL<<3) /* Task Switched */
#define X86_CR0_ET (1UL<<4) /* Extension Type */
#define X86_CR0_NE (1UL<<5) /* Numeric Error */
#define X86_CR0_WP (1UL<<16) /* Write Protect */
#define X86_CR0_AM (1UL<<18) /* Alignment Mask */
#define X86_CR0_NW (1UL<<29) /* Not Write-through */
#define X86_CR0_CD (1UL<<30) /* Cache Disable */
#define X86_CR0_PG (1UL<<31) /* Paging */

/*
 * CPU model specific register (MSR) numbers.
 */

/* x86-64 specific MSRs */
#define MSR_EFER 0xc0000080 /* extended feature register */
#define MSR_STAR 0xc0000081 /* legacy mode SYSCALL target */
#define MSR_LSTAR 0xc0000082 /* long mode SYSCALL target */
#define MSR_CSTAR 0xc0000083 /* compat mode SYSCALL target */
#define MSR_SYSCALL_MASK 0xc0000084 /* EFLAGS mask for syscall */
#define MSR_FS_BASE 0xc0000100 /* 64bit FS base */
#define MSR_GS_BASE 0xc0000101 /* 64bit GS base */
#define MSR_KERNEL_GS_BASE 0xc0000102 /* SwapGS GS shadow */
#define MSR_TSC_AUX 0xc0000103 /* Auxiliary TSC */

/* EFER bits: */
#define EFER_SCE (1<<0)  /* SYSCALL/SYSRET */
#define EFER_LME (1<<8)  /* Long mode enable */
#define EFER_LMA (1<<10) /* Long mode active (read-only) */
#define EFER_NX (1<<11)  /* No execute enable */
#define EFER_SVME (1<<12) /* Enable virtualization */
#define EFER_LMSLE (1<<13) /* Long Mode Segment Limit Enable */
#define EFER_FFXSR (1<<14) /* Enable Fast FXSAVE/FXRSTOR */
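
/*
 * Illustrative sketch (not part of the original header): guest code could
 * enable SYSCALL/SYSRET by setting EFER.SCE through the rdmsr()/wrmsr()
 * helpers above, assuming it runs at CPL0. The helper name is hypothetical.
 */
static inline void enable_syscall_example(void)
{
        wrmsr(MSR_EFER, rdmsr(MSR_EFER) | EFER_SCE);
}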

/* Intel MSRs. Some also available on other CPUs */

#define MSR_PPIN_CTL 0x0000004e
#define MSR_PPIN 0x0000004f

#define MSR_IA32_PERFCTR0 0x000000c1
#define MSR_IA32_PERFCTR1 0x000000c2
#define MSR_FSB_FREQ 0x000000cd
#define MSR_PLATFORM_INFO 0x000000ce
#define MSR_PLATFORM_INFO_CPUID_FAULT_BIT 31
#define MSR_PLATFORM_INFO_CPUID_FAULT BIT_ULL(MSR_PLATFORM_INFO_CPUID_FAULT_BIT)

#define MSR_PKG_CST_CONFIG_CONTROL 0x000000e2
#define NHM_C3_AUTO_DEMOTE (1UL << 25)
#define NHM_C1_AUTO_DEMOTE (1UL << 26)
#define ATM_LNC_C6_AUTO_DEMOTE (1UL << 25)
#define SNB_C1_AUTO_UNDEMOTE (1UL << 27)
#define SNB_C3_AUTO_UNDEMOTE (1UL << 28)

#define MSR_MTRRcap 0x000000fe
#define MSR_IA32_BBL_CR_CTL 0x00000119
#define MSR_IA32_BBL_CR_CTL3 0x0000011e

#define MSR_IA32_SYSENTER_CS 0x00000174
#define MSR_IA32_SYSENTER_ESP 0x00000175
#define MSR_IA32_SYSENTER_EIP 0x00000176

#define MSR_IA32_MCG_CAP 0x00000179
#define MSR_IA32_MCG_STATUS 0x0000017a
#define MSR_IA32_MCG_CTL 0x0000017b
#define MSR_IA32_MCG_EXT_CTL 0x000004d0

#define MSR_OFFCORE_RSP_0 0x000001a6
#define MSR_OFFCORE_RSP_1 0x000001a7
#define MSR_TURBO_RATIO_LIMIT 0x000001ad
#define MSR_TURBO_RATIO_LIMIT1 0x000001ae
#define MSR_TURBO_RATIO_LIMIT2 0x000001af

#define MSR_LBR_SELECT 0x000001c8
#define MSR_LBR_TOS 0x000001c9
#define MSR_LBR_NHM_FROM 0x00000680
#define MSR_LBR_NHM_TO 0x000006c0
#define MSR_LBR_CORE_FROM 0x00000040
#define MSR_LBR_CORE_TO 0x00000060

#define MSR_LBR_INFO_0 0x00000dc0 /* ... 0xddf for _31 */
#define LBR_INFO_MISPRED BIT_ULL(63)
#define LBR_INFO_IN_TX BIT_ULL(62)
#define LBR_INFO_ABORT BIT_ULL(61)
#define LBR_INFO_CYCLES 0xffff

#define MSR_IA32_PEBS_ENABLE 0x000003f1
#define MSR_IA32_DS_AREA 0x00000600
#define MSR_IA32_PERF_CAPABILITIES 0x00000345
#define MSR_PEBS_LD_LAT_THRESHOLD 0x000003f6

#define MSR_IA32_RTIT_CTL 0x00000570
#define MSR_IA32_RTIT_STATUS 0x00000571
#define MSR_IA32_RTIT_ADDR0_A 0x00000580
#define MSR_IA32_RTIT_ADDR0_B 0x00000581
#define MSR_IA32_RTIT_ADDR1_A 0x00000582
#define MSR_IA32_RTIT_ADDR1_B 0x00000583
#define MSR_IA32_RTIT_ADDR2_A 0x00000584
#define MSR_IA32_RTIT_ADDR2_B 0x00000585
#define MSR_IA32_RTIT_ADDR3_A 0x00000586
#define MSR_IA32_RTIT_ADDR3_B 0x00000587
#define MSR_IA32_RTIT_CR3_MATCH 0x00000572
#define MSR_IA32_RTIT_OUTPUT_BASE 0x00000560
#define MSR_IA32_RTIT_OUTPUT_MASK 0x00000561

#define MSR_MTRRfix64K_00000 0x00000250
#define MSR_MTRRfix16K_80000 0x00000258
#define MSR_MTRRfix16K_A0000 0x00000259
#define MSR_MTRRfix4K_C0000 0x00000268
#define MSR_MTRRfix4K_C8000 0x00000269
#define MSR_MTRRfix4K_D0000 0x0000026a
#define MSR_MTRRfix4K_D8000 0x0000026b
#define MSR_MTRRfix4K_E0000 0x0000026c
#define MSR_MTRRfix4K_E8000 0x0000026d
#define MSR_MTRRfix4K_F0000 0x0000026e
#define MSR_MTRRfix4K_F8000 0x0000026f
#define MSR_MTRRdefType 0x000002ff

#define MSR_IA32_CR_PAT 0x00000277

#define MSR_IA32_DEBUGCTLMSR 0x000001d9
#define MSR_IA32_LASTBRANCHFROMIP 0x000001db
#define MSR_IA32_LASTBRANCHTOIP 0x000001dc
#define MSR_IA32_LASTINTFROMIP 0x000001dd
#define MSR_IA32_LASTINTTOIP 0x000001de

/* DEBUGCTLMSR bits (others vary by model): */
#define DEBUGCTLMSR_LBR (1UL << 0) /* last branch recording */
#define DEBUGCTLMSR_BTF_SHIFT 1
#define DEBUGCTLMSR_BTF (1UL << 1) /* single-step on branches */
#define DEBUGCTLMSR_TR (1UL << 6)
#define DEBUGCTLMSR_BTS (1UL << 7)
#define DEBUGCTLMSR_BTINT (1UL << 8)
#define DEBUGCTLMSR_BTS_OFF_OS (1UL << 9)
#define DEBUGCTLMSR_BTS_OFF_USR (1UL << 10)
#define DEBUGCTLMSR_FREEZE_LBRS_ON_PMI (1UL << 11)
#define DEBUGCTLMSR_FREEZE_IN_SMM_BIT 14
#define DEBUGCTLMSR_FREEZE_IN_SMM (1UL << DEBUGCTLMSR_FREEZE_IN_SMM_BIT)
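
/*
 * Illustrative sketch (not part of the original header): last-branch
 * recording could be switched on from guest code by setting the LBR bit of
 * IA32_DEBUGCTL, assuming the vCPU model exposes the feature. The helper
 * name is hypothetical.
 */
static inline void enable_lbr_example(void)
{
        wrmsr(MSR_IA32_DEBUGCTLMSR, rdmsr(MSR_IA32_DEBUGCTLMSR) | DEBUGCTLMSR_LBR);
}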

#define MSR_PEBS_FRONTEND 0x000003f7

#define MSR_IA32_POWER_CTL 0x000001fc

#define MSR_IA32_MC0_CTL 0x00000400
#define MSR_IA32_MC0_STATUS 0x00000401
#define MSR_IA32_MC0_ADDR 0x00000402
#define MSR_IA32_MC0_MISC 0x00000403

/* C-state Residency Counters */
#define MSR_PKG_C3_RESIDENCY 0x000003f8
#define MSR_PKG_C6_RESIDENCY 0x000003f9
#define MSR_ATOM_PKG_C6_RESIDENCY 0x000003fa
#define MSR_PKG_C7_RESIDENCY 0x000003fa
#define MSR_CORE_C3_RESIDENCY 0x000003fc
#define MSR_CORE_C6_RESIDENCY 0x000003fd
#define MSR_CORE_C7_RESIDENCY 0x000003fe
#define MSR_KNL_CORE_C6_RESIDENCY 0x000003ff
#define MSR_PKG_C2_RESIDENCY 0x0000060d
#define MSR_PKG_C8_RESIDENCY 0x00000630
#define MSR_PKG_C9_RESIDENCY 0x00000631
#define MSR_PKG_C10_RESIDENCY 0x00000632

/* Interrupt Response Limit */
#define MSR_PKGC3_IRTL 0x0000060a
#define MSR_PKGC6_IRTL 0x0000060b
#define MSR_PKGC7_IRTL 0x0000060c
#define MSR_PKGC8_IRTL 0x00000633
#define MSR_PKGC9_IRTL 0x00000634
#define MSR_PKGC10_IRTL 0x00000635

/* Run Time Average Power Limiting (RAPL) Interface */

#define MSR_RAPL_POWER_UNIT 0x00000606

#define MSR_PKG_POWER_LIMIT 0x00000610
#define MSR_PKG_ENERGY_STATUS 0x00000611
#define MSR_PKG_PERF_STATUS 0x00000613
#define MSR_PKG_POWER_INFO 0x00000614

#define MSR_DRAM_POWER_LIMIT 0x00000618
#define MSR_DRAM_ENERGY_STATUS 0x00000619
#define MSR_DRAM_PERF_STATUS 0x0000061b
#define MSR_DRAM_POWER_INFO 0x0000061c

#define MSR_PP0_POWER_LIMIT 0x00000638
#define MSR_PP0_ENERGY_STATUS 0x00000639
#define MSR_PP0_POLICY 0x0000063a
#define MSR_PP0_PERF_STATUS 0x0000063b

#define MSR_PP1_POWER_LIMIT 0x00000640
#define MSR_PP1_ENERGY_STATUS 0x00000641
#define MSR_PP1_POLICY 0x00000642

/* Config TDP MSRs */
#define MSR_CONFIG_TDP_NOMINAL 0x00000648
#define MSR_CONFIG_TDP_LEVEL_1 0x00000649
#define MSR_CONFIG_TDP_LEVEL_2 0x0000064A
#define MSR_CONFIG_TDP_CONTROL 0x0000064B
#define MSR_TURBO_ACTIVATION_RATIO 0x0000064C

#define MSR_PLATFORM_ENERGY_STATUS 0x0000064D

#define MSR_PKG_WEIGHTED_CORE_C0_RES 0x00000658
#define MSR_PKG_ANY_CORE_C0_RES 0x00000659
#define MSR_PKG_ANY_GFXE_C0_RES 0x0000065A
#define MSR_PKG_BOTH_CORE_GFXE_C0_RES 0x0000065B

#define MSR_CORE_C1_RES 0x00000660
#define MSR_MODULE_C6_RES_MS 0x00000664

#define MSR_CC6_DEMOTION_POLICY_CONFIG 0x00000668
#define MSR_MC6_DEMOTION_POLICY_CONFIG 0x00000669

#define MSR_ATOM_CORE_RATIOS 0x0000066a
#define MSR_ATOM_CORE_VIDS 0x0000066b
#define MSR_ATOM_CORE_TURBO_RATIOS 0x0000066c
#define MSR_ATOM_CORE_TURBO_VIDS 0x0000066d


#define MSR_CORE_PERF_LIMIT_REASONS 0x00000690
#define MSR_GFX_PERF_LIMIT_REASONS 0x000006B0
#define MSR_RING_PERF_LIMIT_REASONS 0x000006B1

/* Hardware P state interface */
#define MSR_PPERF 0x0000064e
#define MSR_PERF_LIMIT_REASONS 0x0000064f
#define MSR_PM_ENABLE 0x00000770
#define MSR_HWP_CAPABILITIES 0x00000771
#define MSR_HWP_REQUEST_PKG 0x00000772
#define MSR_HWP_INTERRUPT 0x00000773
#define MSR_HWP_REQUEST 0x00000774
#define MSR_HWP_STATUS 0x00000777
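
/*
 * Illustrative sketch (not part of the original header): HWP is turned on
 * by writing bit 0 of IA32_PM_ENABLE, after which the IA32_HWP_* registers
 * above become meaningful, assuming the vCPU advertises HWP in
 * CPUID.06H:EAX[7]. The helper name is hypothetical.
 */
static inline void enable_hwp_example(void)
{
        wrmsr(MSR_PM_ENABLE, 1);
}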

/* CPUID.6.EAX */
#define HWP_BASE_BIT (1<<7)
#define HWP_NOTIFICATIONS_BIT (1<<8)
#define HWP_ACTIVITY_WINDOW_BIT (1<<9)
#define HWP_ENERGY_PERF_PREFERENCE_BIT (1<<10)
#define HWP_PACKAGE_LEVEL_REQUEST_BIT (1<<11)

/* IA32_HWP_CAPABILITIES */
#define HWP_HIGHEST_PERF(x) (((x) >> 0) & 0xff)
#define HWP_GUARANTEED_PERF(x) (((x) >> 8) & 0xff)
#define HWP_MOSTEFFICIENT_PERF(x) (((x) >> 16) & 0xff)
#define HWP_LOWEST_PERF(x) (((x) >> 24) & 0xff)

/* IA32_HWP_REQUEST */
#define HWP_MIN_PERF(x) (x & 0xff)
#define HWP_MAX_PERF(x) ((x & 0xff) << 8)
#define HWP_DESIRED_PERF(x) ((x & 0xff) << 16)
#define HWP_ENERGY_PERF_PREFERENCE(x) (((unsigned long long) x & 0xff) << 24)
#define HWP_EPP_PERFORMANCE 0x00
#define HWP_EPP_BALANCE_PERFORMANCE 0x80
#define HWP_EPP_BALANCE_POWERSAVE 0xC0
#define HWP_EPP_POWERSAVE 0xFF
#define HWP_ACTIVITY_WINDOW(x) ((unsigned long long)(x & 0xff3) << 32)
#define HWP_PACKAGE_CONTROL(x) ((unsigned long long)(x & 0x1) << 42)

/* IA32_HWP_STATUS */
#define HWP_GUARANTEED_CHANGE(x) (x & 0x1)
#define HWP_EXCURSION_TO_MINIMUM(x) (x & 0x4)

/* IA32_HWP_INTERRUPT */
#define HWP_CHANGE_TO_GUARANTEED_INT(x) (x & 0x1)
#define HWP_EXCURSION_TO_MINIMUM_INT(x) (x & 0x2)

#define MSR_AMD64_MC0_MASK 0xc0010044

#define MSR_IA32_MCx_CTL(x) (MSR_IA32_MC0_CTL + 4*(x))
#define MSR_IA32_MCx_STATUS(x) (MSR_IA32_MC0_STATUS + 4*(x))
#define MSR_IA32_MCx_ADDR(x) (MSR_IA32_MC0_ADDR + 4*(x))
#define MSR_IA32_MCx_MISC(x) (MSR_IA32_MC0_MISC + 4*(x))

#define MSR_AMD64_MCx_MASK(x) (MSR_AMD64_MC0_MASK + (x))

/* These are consecutive and not in the normal 4-register MCE bank blocks */
#define MSR_IA32_MC0_CTL2 0x00000280
#define MSR_IA32_MCx_CTL2(x) (MSR_IA32_MC0_CTL2 + (x))
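
/*
 * Illustrative sketch (not part of the original header): the MCx helpers
 * above compute per-bank machine-check register numbers, so bank N's status
 * register can be read with a single rdmsr(). The helper name is
 * hypothetical.
 */
static inline uint64_t read_mce_bank_status_example(unsigned int bank)
{
        return rdmsr(MSR_IA32_MCx_STATUS(bank));
}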

#define MSR_P6_PERFCTR0 0x000000c1
#define MSR_P6_PERFCTR1 0x000000c2
#define MSR_P6_EVNTSEL0 0x00000186
#define MSR_P6_EVNTSEL1 0x00000187

#define MSR_KNC_PERFCTR0 0x00000020
#define MSR_KNC_PERFCTR1 0x00000021
#define MSR_KNC_EVNTSEL0 0x00000028
#define MSR_KNC_EVNTSEL1 0x00000029

/* Alternative perfctr range with full access. */
#define MSR_IA32_PMC0 0x000004c1

/* AMD64 MSRs. Not complete. See the architecture manual for a more
   complete list. */

#define MSR_AMD64_PATCH_LEVEL 0x0000008b
#define MSR_AMD64_TSC_RATIO 0xc0000104
#define MSR_AMD64_NB_CFG 0xc001001f
#define MSR_AMD64_PATCH_LOADER 0xc0010020
#define MSR_AMD64_OSVW_ID_LENGTH 0xc0010140
#define MSR_AMD64_OSVW_STATUS 0xc0010141
#define MSR_AMD64_LS_CFG 0xc0011020
#define MSR_AMD64_DC_CFG 0xc0011022
#define MSR_AMD64_BU_CFG2 0xc001102a
#define MSR_AMD64_IBSFETCHCTL 0xc0011030
#define MSR_AMD64_IBSFETCHLINAD 0xc0011031
#define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032
#define MSR_AMD64_IBSFETCH_REG_COUNT 3
#define MSR_AMD64_IBSFETCH_REG_MASK ((1UL<<MSR_AMD64_IBSFETCH_REG_COUNT)-1)
#define MSR_AMD64_IBSOPCTL 0xc0011033
#define MSR_AMD64_IBSOPRIP 0xc0011034
#define MSR_AMD64_IBSOPDATA 0xc0011035
#define MSR_AMD64_IBSOPDATA2 0xc0011036
#define MSR_AMD64_IBSOPDATA3 0xc0011037
#define MSR_AMD64_IBSDCLINAD 0xc0011038
#define MSR_AMD64_IBSDCPHYSAD 0xc0011039
#define MSR_AMD64_IBSOP_REG_COUNT 7
#define MSR_AMD64_IBSOP_REG_MASK ((1UL<<MSR_AMD64_IBSOP_REG_COUNT)-1)
#define MSR_AMD64_IBSCTL 0xc001103a
#define MSR_AMD64_IBSBRTARGET 0xc001103b
#define MSR_AMD64_IBSOPDATA4 0xc001103d
#define MSR_AMD64_IBS_REG_COUNT_MAX 8 /* includes MSR_AMD64_IBSBRTARGET */
#define MSR_AMD64_SEV 0xc0010131
#define MSR_AMD64_SEV_ENABLED_BIT 0
#define MSR_AMD64_SEV_ENABLED BIT_ULL(MSR_AMD64_SEV_ENABLED_BIT)

/* Fam 17h MSRs */
#define MSR_F17H_IRPERF 0xc00000e9

/* Fam 16h MSRs */
#define MSR_F16H_L2I_PERF_CTL 0xc0010230
#define MSR_F16H_L2I_PERF_CTR 0xc0010231
#define MSR_F16H_DR1_ADDR_MASK 0xc0011019
#define MSR_F16H_DR2_ADDR_MASK 0xc001101a
#define MSR_F16H_DR3_ADDR_MASK 0xc001101b
#define MSR_F16H_DR0_ADDR_MASK 0xc0011027

/* Fam 15h MSRs */
#define MSR_F15H_PERF_CTL 0xc0010200
#define MSR_F15H_PERF_CTR 0xc0010201
#define MSR_F15H_NB_PERF_CTL 0xc0010240
#define MSR_F15H_NB_PERF_CTR 0xc0010241
#define MSR_F15H_PTSC 0xc0010280
#define MSR_F15H_IC_CFG 0xc0011021

/* Fam 10h MSRs */
#define MSR_FAM10H_MMIO_CONF_BASE 0xc0010058
#define FAM10H_MMIO_CONF_ENABLE (1<<0)
#define FAM10H_MMIO_CONF_BUSRANGE_MASK 0xf
#define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2
#define FAM10H_MMIO_CONF_BASE_MASK 0xfffffffULL
#define FAM10H_MMIO_CONF_BASE_SHIFT 20
#define MSR_FAM10H_NODE_ID 0xc001100c
#define MSR_F10H_DECFG 0xc0011029
#define MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT 1
#define MSR_F10H_DECFG_LFENCE_SERIALIZE BIT_ULL(MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT)

/* K8 MSRs */
#define MSR_K8_TOP_MEM1 0xc001001a
#define MSR_K8_TOP_MEM2 0xc001001d
#define MSR_K8_SYSCFG 0xc0010010
#define MSR_K8_SYSCFG_MEM_ENCRYPT_BIT 23
#define MSR_K8_SYSCFG_MEM_ENCRYPT BIT_ULL(MSR_K8_SYSCFG_MEM_ENCRYPT_BIT)
#define MSR_K8_INT_PENDING_MSG 0xc0010055
/* C1E active bits in int pending message */
#define K8_INTP_C1E_ACTIVE_MASK 0x18000000
#define MSR_K8_TSEG_ADDR 0xc0010112
#define MSR_K8_TSEG_MASK 0xc0010113
#define K8_MTRRFIXRANGE_DRAM_ENABLE 0x00040000 /* MtrrFixDramEn bit */
#define K8_MTRRFIXRANGE_DRAM_MODIFY 0x00080000 /* MtrrFixDramModEn bit */
#define K8_MTRR_RDMEM_WRMEM_MASK 0x18181818 /* Mask: RdMem|WrMem */

/* K7 MSRs */
#define MSR_K7_EVNTSEL0 0xc0010000
#define MSR_K7_PERFCTR0 0xc0010004
#define MSR_K7_EVNTSEL1 0xc0010001
#define MSR_K7_PERFCTR1 0xc0010005
#define MSR_K7_EVNTSEL2 0xc0010002
#define MSR_K7_PERFCTR2 0xc0010006
#define MSR_K7_EVNTSEL3 0xc0010003
#define MSR_K7_PERFCTR3 0xc0010007
#define MSR_K7_CLK_CTL 0xc001001b
#define MSR_K7_HWCR 0xc0010015
#define MSR_K7_HWCR_SMMLOCK_BIT 0
#define MSR_K7_HWCR_SMMLOCK BIT_ULL(MSR_K7_HWCR_SMMLOCK_BIT)
#define MSR_K7_FID_VID_CTL 0xc0010041
#define MSR_K7_FID_VID_STATUS 0xc0010042

/* K6 MSRs */
#define MSR_K6_WHCR 0xc0000082
#define MSR_K6_UWCCR 0xc0000085
#define MSR_K6_EPMR 0xc0000086
#define MSR_K6_PSOR 0xc0000087
#define MSR_K6_PFIR 0xc0000088

/* Centaur-Hauls/IDT defined MSRs. */
#define MSR_IDT_FCR1 0x00000107
#define MSR_IDT_FCR2 0x00000108
#define MSR_IDT_FCR3 0x00000109
#define MSR_IDT_FCR4 0x0000010a

#define MSR_IDT_MCR0 0x00000110
#define MSR_IDT_MCR1 0x00000111
#define MSR_IDT_MCR2 0x00000112
#define MSR_IDT_MCR3 0x00000113
#define MSR_IDT_MCR4 0x00000114
#define MSR_IDT_MCR5 0x00000115
#define MSR_IDT_MCR6 0x00000116
#define MSR_IDT_MCR7 0x00000117
#define MSR_IDT_MCR_CTRL 0x00000120

/* VIA Cyrix defined MSRs */
#define MSR_VIA_FCR 0x00001107
#define MSR_VIA_LONGHAUL 0x0000110a
#define MSR_VIA_RNG 0x0000110b
#define MSR_VIA_BCR2 0x00001147

/* Transmeta defined MSRs */
#define MSR_TMTA_LONGRUN_CTRL 0x80868010
#define MSR_TMTA_LONGRUN_FLAGS 0x80868011
#define MSR_TMTA_LRTI_READOUT 0x80868018
#define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a

/* Intel defined MSRs. */
#define MSR_IA32_P5_MC_ADDR 0x00000000
#define MSR_IA32_P5_MC_TYPE 0x00000001
#define MSR_IA32_TSC 0x00000010
#define MSR_IA32_PLATFORM_ID 0x00000017
#define MSR_IA32_EBL_CR_POWERON 0x0000002a
#define MSR_EBC_FREQUENCY_ID 0x0000002c
#define MSR_SMI_COUNT 0x00000034
#define MSR_IA32_FEATURE_CONTROL 0x0000003a
#define MSR_IA32_TSC_ADJUST 0x0000003b
#define MSR_IA32_BNDCFGS 0x00000d90

#define MSR_IA32_BNDCFGS_RSVD 0x00000ffc

#define MSR_IA32_XSS 0x00000da0

#define FEATURE_CONTROL_LOCKED (1<<0)
#define FEATURE_CONTROL_VMXON_ENABLED_INSIDE_SMX (1<<1)
#define FEATURE_CONTROL_VMXON_ENABLED_OUTSIDE_SMX (1<<2)
#define FEATURE_CONTROL_LMCE (1<<20)

#define MSR_IA32_APICBASE 0x0000001b
#define MSR_IA32_APICBASE_BSP (1<<8)
#define MSR_IA32_APICBASE_ENABLE (1<<11)
#define MSR_IA32_APICBASE_BASE (0xfffff<<12)

#define MSR_IA32_TSCDEADLINE 0x000006e0

#define MSR_IA32_UCODE_WRITE 0x00000079
#define MSR_IA32_UCODE_REV 0x0000008b

#define MSR_IA32_SMM_MONITOR_CTL 0x0000009b
#define MSR_IA32_SMBASE 0x0000009e

#define MSR_IA32_PERF_STATUS 0x00000198
#define MSR_IA32_PERF_CTL 0x00000199
#define INTEL_PERF_CTL_MASK 0xffff
#define MSR_AMD_PSTATE_DEF_BASE 0xc0010064
#define MSR_AMD_PERF_STATUS 0xc0010063
#define MSR_AMD_PERF_CTL 0xc0010062

#define MSR_IA32_MPERF 0x000000e7
#define MSR_IA32_APERF 0x000000e8

#define MSR_IA32_THERM_CONTROL 0x0000019a
#define MSR_IA32_THERM_INTERRUPT 0x0000019b

#define THERM_INT_HIGH_ENABLE (1 << 0)
#define THERM_INT_LOW_ENABLE (1 << 1)
#define THERM_INT_PLN_ENABLE (1 << 24)

#define MSR_IA32_THERM_STATUS 0x0000019c

#define THERM_STATUS_PROCHOT (1 << 0)
#define THERM_STATUS_POWER_LIMIT (1 << 10)

#define MSR_THERM2_CTL 0x0000019d

#define MSR_THERM2_CTL_TM_SELECT (1ULL << 16)

#define MSR_IA32_MISC_ENABLE 0x000001a0

#define MSR_IA32_TEMPERATURE_TARGET 0x000001a2

#define MSR_MISC_FEATURE_CONTROL 0x000001a4
#define MSR_MISC_PWR_MGMT 0x000001aa

#define MSR_IA32_ENERGY_PERF_BIAS 0x000001b0
#define ENERGY_PERF_BIAS_PERFORMANCE 0
#define ENERGY_PERF_BIAS_BALANCE_PERFORMANCE 4
#define ENERGY_PERF_BIAS_NORMAL 6
#define ENERGY_PERF_BIAS_BALANCE_POWERSAVE 8
#define ENERGY_PERF_BIAS_POWERSAVE 15
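
/*
 * Illustrative sketch (not part of the original header): a guest could ask
 * for the "performance" energy/perf bias hint by writing one of the values
 * above to IA32_ENERGY_PERF_BIAS, assuming the vCPU advertises the feature.
 * The helper name is hypothetical.
 */
static inline void set_epb_performance_example(void)
{
        wrmsr(MSR_IA32_ENERGY_PERF_BIAS, ENERGY_PERF_BIAS_PERFORMANCE);
}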

#define MSR_IA32_PACKAGE_THERM_STATUS 0x000001b1

#define PACKAGE_THERM_STATUS_PROCHOT (1 << 0)
#define PACKAGE_THERM_STATUS_POWER_LIMIT (1 << 10)

#define MSR_IA32_PACKAGE_THERM_INTERRUPT 0x000001b2

#define PACKAGE_THERM_INT_HIGH_ENABLE (1 << 0)
#define PACKAGE_THERM_INT_LOW_ENABLE (1 << 1)
#define PACKAGE_THERM_INT_PLN_ENABLE (1 << 24)

/* Thermal Thresholds Support */
#define THERM_INT_THRESHOLD0_ENABLE (1 << 15)
#define THERM_SHIFT_THRESHOLD0 8
#define THERM_MASK_THRESHOLD0 (0x7f << THERM_SHIFT_THRESHOLD0)
#define THERM_INT_THRESHOLD1_ENABLE (1 << 23)
#define THERM_SHIFT_THRESHOLD1 16
#define THERM_MASK_THRESHOLD1 (0x7f << THERM_SHIFT_THRESHOLD1)
#define THERM_STATUS_THRESHOLD0 (1 << 6)
#define THERM_LOG_THRESHOLD0 (1 << 7)
#define THERM_STATUS_THRESHOLD1 (1 << 8)
#define THERM_LOG_THRESHOLD1 (1 << 9)

/* MISC_ENABLE bits: architectural */
#define MSR_IA32_MISC_ENABLE_FAST_STRING_BIT 0
#define MSR_IA32_MISC_ENABLE_FAST_STRING (1ULL << MSR_IA32_MISC_ENABLE_FAST_STRING_BIT)
#define MSR_IA32_MISC_ENABLE_TCC_BIT 1
#define MSR_IA32_MISC_ENABLE_TCC (1ULL << MSR_IA32_MISC_ENABLE_TCC_BIT)
#define MSR_IA32_MISC_ENABLE_EMON_BIT 7
#define MSR_IA32_MISC_ENABLE_EMON (1ULL << MSR_IA32_MISC_ENABLE_EMON_BIT)
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT 11
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT 12
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT 16
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP (1ULL << MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT)
#define MSR_IA32_MISC_ENABLE_MWAIT_BIT 18
#define MSR_IA32_MISC_ENABLE_MWAIT (1ULL << MSR_IA32_MISC_ENABLE_MWAIT_BIT)
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT 22
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID (1ULL << MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT)
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT 23
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT 34
#define MSR_IA32_MISC_ENABLE_XD_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT)

/* MISC_ENABLE bits: model-specific, meaning may vary from core to core */
#define MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT 2
#define MSR_IA32_MISC_ENABLE_X87_COMPAT (1ULL << MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT)
#define MSR_IA32_MISC_ENABLE_TM1_BIT 3
#define MSR_IA32_MISC_ENABLE_TM1 (1ULL << MSR_IA32_MISC_ENABLE_TM1_BIT)
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT 4
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT 6
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT 8
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT 9
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_FERR_BIT 10
#define MSR_IA32_MISC_ENABLE_FERR (1ULL << MSR_IA32_MISC_ENABLE_FERR_BIT)
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT 10
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX (1ULL << MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT)
#define MSR_IA32_MISC_ENABLE_TM2_BIT 13
#define MSR_IA32_MISC_ENABLE_TM2 (1ULL << MSR_IA32_MISC_ENABLE_TM2_BIT)
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT 19
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT 20
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT 24
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT (1ULL << MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT)
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT 37
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT 38
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT 39
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT)
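
/*
 * Illustrative sketch (not part of the original header): whether branch
 * trace store can be used could be probed by checking that the
 * BTS-unavailable bit of IA32_MISC_ENABLE is clear. The helper name is
 * hypothetical.
 */
static inline int bts_available_example(void)
{
        return !(rdmsr(MSR_IA32_MISC_ENABLE) & MSR_IA32_MISC_ENABLE_BTS_UNAVAIL);
}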

/* MISC_FEATURES_ENABLES non-architectural features */
#define MSR_MISC_FEATURES_ENABLES 0x00000140

#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT 0
#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT BIT_ULL(MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT)
#define MSR_MISC_FEATURES_ENABLES_RING3MWAIT_BIT 1

#define MSR_IA32_TSC_DEADLINE 0x000006E0

/* P4/Xeon+ specific */
#define MSR_IA32_MCG_EAX 0x00000180
#define MSR_IA32_MCG_EBX 0x00000181
#define MSR_IA32_MCG_ECX 0x00000182
#define MSR_IA32_MCG_EDX 0x00000183
#define MSR_IA32_MCG_ESI 0x00000184
#define MSR_IA32_MCG_EDI 0x00000185
#define MSR_IA32_MCG_EBP 0x00000186
#define MSR_IA32_MCG_ESP 0x00000187
#define MSR_IA32_MCG_EFLAGS 0x00000188
#define MSR_IA32_MCG_EIP 0x00000189
#define MSR_IA32_MCG_RESERVED 0x0000018a

/* Pentium IV performance counter MSRs */
#define MSR_P4_BPU_PERFCTR0 0x00000300
#define MSR_P4_BPU_PERFCTR1 0x00000301
#define MSR_P4_BPU_PERFCTR2 0x00000302
#define MSR_P4_BPU_PERFCTR3 0x00000303
#define MSR_P4_MS_PERFCTR0 0x00000304
#define MSR_P4_MS_PERFCTR1 0x00000305
#define MSR_P4_MS_PERFCTR2 0x00000306
#define MSR_P4_MS_PERFCTR3 0x00000307
#define MSR_P4_FLAME_PERFCTR0 0x00000308
#define MSR_P4_FLAME_PERFCTR1 0x00000309
#define MSR_P4_FLAME_PERFCTR2 0x0000030a
#define MSR_P4_FLAME_PERFCTR3 0x0000030b
#define MSR_P4_IQ_PERFCTR0 0x0000030c
#define MSR_P4_IQ_PERFCTR1 0x0000030d
#define MSR_P4_IQ_PERFCTR2 0x0000030e
#define MSR_P4_IQ_PERFCTR3 0x0000030f
#define MSR_P4_IQ_PERFCTR4 0x00000310
#define MSR_P4_IQ_PERFCTR5 0x00000311
#define MSR_P4_BPU_CCCR0 0x00000360
#define MSR_P4_BPU_CCCR1 0x00000361
#define MSR_P4_BPU_CCCR2 0x00000362
#define MSR_P4_BPU_CCCR3 0x00000363
#define MSR_P4_MS_CCCR0 0x00000364
#define MSR_P4_MS_CCCR1 0x00000365
#define MSR_P4_MS_CCCR2 0x00000366
#define MSR_P4_MS_CCCR3 0x00000367
#define MSR_P4_FLAME_CCCR0 0x00000368
#define MSR_P4_FLAME_CCCR1 0x00000369
#define MSR_P4_FLAME_CCCR2 0x0000036a
#define MSR_P4_FLAME_CCCR3 0x0000036b
#define MSR_P4_IQ_CCCR0 0x0000036c
#define MSR_P4_IQ_CCCR1 0x0000036d
#define MSR_P4_IQ_CCCR2 0x0000036e
#define MSR_P4_IQ_CCCR3 0x0000036f
#define MSR_P4_IQ_CCCR4 0x00000370
#define MSR_P4_IQ_CCCR5 0x00000371
#define MSR_P4_ALF_ESCR0 0x000003ca
#define MSR_P4_ALF_ESCR1 0x000003cb
#define MSR_P4_BPU_ESCR0 0x000003b2
#define MSR_P4_BPU_ESCR1 0x000003b3
#define MSR_P4_BSU_ESCR0 0x000003a0
#define MSR_P4_BSU_ESCR1 0x000003a1
#define MSR_P4_CRU_ESCR0 0x000003b8
#define MSR_P4_CRU_ESCR1 0x000003b9
#define MSR_P4_CRU_ESCR2 0x000003cc
#define MSR_P4_CRU_ESCR3 0x000003cd
#define MSR_P4_CRU_ESCR4 0x000003e0
#define MSR_P4_CRU_ESCR5 0x000003e1
#define MSR_P4_DAC_ESCR0 0x000003a8
#define MSR_P4_DAC_ESCR1 0x000003a9
#define MSR_P4_FIRM_ESCR0 0x000003a4
#define MSR_P4_FIRM_ESCR1 0x000003a5
#define MSR_P4_FLAME_ESCR0 0x000003a6
#define MSR_P4_FLAME_ESCR1 0x000003a7
#define MSR_P4_FSB_ESCR0 0x000003a2
#define MSR_P4_FSB_ESCR1 0x000003a3
#define MSR_P4_IQ_ESCR0 0x000003ba
#define MSR_P4_IQ_ESCR1 0x000003bb
#define MSR_P4_IS_ESCR0 0x000003b4
#define MSR_P4_IS_ESCR1 0x000003b5
#define MSR_P4_ITLB_ESCR0 0x000003b6
#define MSR_P4_ITLB_ESCR1 0x000003b7
#define MSR_P4_IX_ESCR0 0x000003c8
#define MSR_P4_IX_ESCR1 0x000003c9
#define MSR_P4_MOB_ESCR0 0x000003aa
#define MSR_P4_MOB_ESCR1 0x000003ab
#define MSR_P4_MS_ESCR0 0x000003c0
#define MSR_P4_MS_ESCR1 0x000003c1
#define MSR_P4_PMH_ESCR0 0x000003ac
#define MSR_P4_PMH_ESCR1 0x000003ad
#define MSR_P4_RAT_ESCR0 0x000003bc
#define MSR_P4_RAT_ESCR1 0x000003bd
#define MSR_P4_SAAT_ESCR0 0x000003ae
#define MSR_P4_SAAT_ESCR1 0x000003af
#define MSR_P4_SSU_ESCR0 0x000003be
#define MSR_P4_SSU_ESCR1 0x000003bf /* guess: not in manual */

#define MSR_P4_TBPU_ESCR0 0x000003c2
#define MSR_P4_TBPU_ESCR1 0x000003c3
#define MSR_P4_TC_ESCR0 0x000003c4
#define MSR_P4_TC_ESCR1 0x000003c5
#define MSR_P4_U2L_ESCR0 0x000003b0
#define MSR_P4_U2L_ESCR1 0x000003b1

#define MSR_P4_PEBS_MATRIX_VERT 0x000003f2

/* Intel Core-based CPU performance counters */
#define MSR_CORE_PERF_FIXED_CTR0 0x00000309
#define MSR_CORE_PERF_FIXED_CTR1 0x0000030a
#define MSR_CORE_PERF_FIXED_CTR2 0x0000030b
#define MSR_CORE_PERF_FIXED_CTR_CTRL 0x0000038d
#define MSR_CORE_PERF_GLOBAL_STATUS 0x0000038e
#define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f
#define MSR_CORE_PERF_GLOBAL_OVF_CTRL 0x00000390

/* Geode defined MSRs */
#define MSR_GEODE_BUSCONT_CONF0 0x00001900

/* Intel VT MSRs */
#define MSR_IA32_VMX_BASIC 0x00000480
#define MSR_IA32_VMX_PINBASED_CTLS 0x00000481
#define MSR_IA32_VMX_PROCBASED_CTLS 0x00000482
#define MSR_IA32_VMX_EXIT_CTLS 0x00000483
#define MSR_IA32_VMX_ENTRY_CTLS 0x00000484
#define MSR_IA32_VMX_MISC 0x00000485
#define MSR_IA32_VMX_CR0_FIXED0 0x00000486
#define MSR_IA32_VMX_CR0_FIXED1 0x00000487
#define MSR_IA32_VMX_CR4_FIXED0 0x00000488
#define MSR_IA32_VMX_CR4_FIXED1 0x00000489
#define MSR_IA32_VMX_VMCS_ENUM 0x0000048a
#define MSR_IA32_VMX_PROCBASED_CTLS2 0x0000048b
#define MSR_IA32_VMX_EPT_VPID_CAP 0x0000048c
#define MSR_IA32_VMX_TRUE_PINBASED_CTLS 0x0000048d
#define MSR_IA32_VMX_TRUE_PROCBASED_CTLS 0x0000048e
#define MSR_IA32_VMX_TRUE_EXIT_CTLS 0x0000048f
#define MSR_IA32_VMX_TRUE_ENTRY_CTLS 0x00000490
#define MSR_IA32_VMX_VMFUNC 0x00000491

/* VMX_BASIC bits and bitmasks */
#define VMX_BASIC_VMCS_SIZE_SHIFT 32
#define VMX_BASIC_TRUE_CTLS (1ULL << 55)
#define VMX_BASIC_64 0x0001000000000000LLU
#define VMX_BASIC_MEM_TYPE_SHIFT 50
#define VMX_BASIC_MEM_TYPE_MASK 0x003c000000000000LLU
#define VMX_BASIC_MEM_TYPE_WB 6LLU
#define VMX_BASIC_INOUT 0x0040000000000000LLU
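
/*
 * Illustrative sketch (not part of the original header): fields of
 * IA32_VMX_BASIC can be picked apart with the masks above, e.g. whether the
 * "true" VMX control MSRs are supported. The helper name is hypothetical.
 */
static inline int vmx_has_true_ctls_example(void)
{
        return !!(rdmsr(MSR_IA32_VMX_BASIC) & VMX_BASIC_TRUE_CTLS);
}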

/* MSR_IA32_VMX_MISC bits */
#define MSR_IA32_VMX_MISC_VMWRITE_SHADOW_RO_FIELDS (1ULL << 29)
#define MSR_IA32_VMX_MISC_PREEMPTION_TIMER_SCALE 0x1F
/* AMD-V MSRs */

#define MSR_VM_CR 0xc0010114
#define MSR_VM_IGNNE 0xc0010115
#define MSR_VM_HSAVE_PA 0xc0010117

#endif /* SELFTEST_KVM_PROCESSOR_H */