Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

KVM: selftests: aarch64: Update tools copy of arm_pmuv3.h

Now that ARMV8_PMU_PMCR_N is made with GENMASK, update usages to treat
it as a pre-shifted mask.

Signed-off-by: James Clark <james.clark@arm.com>
Link: https://lore.kernel.org/r/20231211161331.1277825-9-james.clark@arm.com
Signed-off-by: Will Deacon <will@kernel.org>

Authored by James Clark and committed by Will Deacon
c7b98bf0 a5f4ca68

+28 -20
+26 -17
tools/include/perf/arm_pmuv3.h
···
218 218  #define ARMV8_PMU_PMCR_DP	(1 << 5) /* Disable CCNT if non-invasive debug*/
219 219  #define ARMV8_PMU_PMCR_LC	(1 << 6) /* Overflow on 64 bit cycle counter */
220 220  #define ARMV8_PMU_PMCR_LP	(1 << 7) /* Long event counter enable */
221     - #define ARMV8_PMU_PMCR_N_SHIFT	11 /* Number of counters supported */
222     - #define ARMV8_PMU_PMCR_N_MASK	0x1f
223     - #define ARMV8_PMU_PMCR_MASK	0xff /* Mask for writable bits */
    221 + #define ARMV8_PMU_PMCR_N	GENMASK(15, 11) /* Number of counters supported */
    222 + /* Mask for writable bits */
    223 + #define ARMV8_PMU_PMCR_MASK	(ARMV8_PMU_PMCR_E | ARMV8_PMU_PMCR_P | \
    224 + 				 ARMV8_PMU_PMCR_C | ARMV8_PMU_PMCR_D | \
    225 + 				 ARMV8_PMU_PMCR_X | ARMV8_PMU_PMCR_DP | \
    226 + 				 ARMV8_PMU_PMCR_LC | ARMV8_PMU_PMCR_LP)
224 227
225 228  /*
226 229   * PMOVSR: counters overflow flag status reg
227 230   */
228     - #define ARMV8_PMU_OVSR_MASK	0xffffffff /* Mask for writable bits */
229     - #define ARMV8_PMU_OVERFLOWED_MASK	ARMV8_PMU_OVSR_MASK
    231 + #define ARMV8_PMU_OVSR_P	GENMASK(30, 0)
    232 + #define ARMV8_PMU_OVSR_C	BIT(31)
    233 + /* Mask for writable bits is both P and C fields */
    234 + #define ARMV8_PMU_OVERFLOWED_MASK	(ARMV8_PMU_OVSR_P | ARMV8_PMU_OVSR_C)
230 235
231 236  /*
232 237   * PMXEVTYPER: Event selection reg
233 238   */
234     - #define ARMV8_PMU_EVTYPE_MASK	0xc800ffff /* Mask for writable bits */
235     - #define ARMV8_PMU_EVTYPE_EVENT	0xffff /* Mask for EVENT bits */
    239 + #define ARMV8_PMU_EVTYPE_EVENT	GENMASK(15, 0) /* Mask for EVENT bits */
    240 + #define ARMV8_PMU_EVTYPE_TH	GENMASK(43, 32)
    241 + #define ARMV8_PMU_EVTYPE_TC	GENMASK(63, 61)
236 242
237 243  /*
238 244   * Event filters for PMUv3
239 245   */
240     - #define ARMV8_PMU_EXCLUDE_EL1	(1U << 31)
241     - #define ARMV8_PMU_EXCLUDE_EL0	(1U << 30)
242     - #define ARMV8_PMU_INCLUDE_EL2	(1U << 27)
    246 + #define ARMV8_PMU_EXCLUDE_EL1	(1U << 31)
    247 + #define ARMV8_PMU_EXCLUDE_EL0	(1U << 30)
    248 + #define ARMV8_PMU_EXCLUDE_NS_EL1	(1U << 29)
    249 + #define ARMV8_PMU_EXCLUDE_NS_EL0	(1U << 28)
    250 + #define ARMV8_PMU_INCLUDE_EL2	(1U << 27)
    251 + #define ARMV8_PMU_EXCLUDE_EL3	(1U << 26)
243 252
244 253  /*
245 254   * PMUSERENR: user enable reg
246 255   */
247     - #define ARMV8_PMU_USERENR_MASK	0xf /* Mask for writable bits */
248 256  #define ARMV8_PMU_USERENR_EN	(1 << 0) /* PMU regs can be accessed at EL0 */
249 257  #define ARMV8_PMU_USERENR_SW	(1 << 1) /* PMSWINC can be written at EL0 */
250 258  #define ARMV8_PMU_USERENR_CR	(1 << 2) /* Cycle counter can be read at EL0 */
251 259  #define ARMV8_PMU_USERENR_ER	(1 << 3) /* Event counter can be read at EL0 */
    260 + /* Mask for writable bits */
    261 + #define ARMV8_PMU_USERENR_MASK	(ARMV8_PMU_USERENR_EN | ARMV8_PMU_USERENR_SW | \
    262 + 				 ARMV8_PMU_USERENR_CR | ARMV8_PMU_USERENR_ER)
252 263
253 264  /* PMMIR_EL1.SLOTS mask */
254     - #define ARMV8_PMU_SLOTS_MASK	0xff
255     -
256     - #define ARMV8_PMU_BUS_SLOTS_SHIFT	8
257     - #define ARMV8_PMU_BUS_SLOTS_MASK	0xff
258     - #define ARMV8_PMU_BUS_WIDTH_SHIFT	16
259     - #define ARMV8_PMU_BUS_WIDTH_MASK	0xf
    265 + #define ARMV8_PMU_SLOTS	GENMASK(7, 0)
    266 + #define ARMV8_PMU_BUS_SLOTS	GENMASK(15, 8)
    267 + #define ARMV8_PMU_BUS_WIDTH	GENMASK(19, 16)
    268 + #define ARMV8_PMU_THWIDTH	GENMASK(23, 20)
260 269
261 270  /*
262 271   * This code is really good
+2 -3
tools/testing/selftests/kvm/aarch64/vpmu_counter_access.c
···
42 42
43 43  static uint64_t get_pmcr_n(uint64_t pmcr)
44 44  {
45     - 	return (pmcr >> ARMV8_PMU_PMCR_N_SHIFT) & ARMV8_PMU_PMCR_N_MASK;
    45 + 	return FIELD_GET(ARMV8_PMU_PMCR_N, pmcr);
46 46  }
47 47
48 48  static void set_pmcr_n(uint64_t *pmcr, uint64_t pmcr_n)
49 49  {
50     - 	*pmcr = *pmcr & ~(ARMV8_PMU_PMCR_N_MASK << ARMV8_PMU_PMCR_N_SHIFT);
51     - 	*pmcr |= (pmcr_n << ARMV8_PMU_PMCR_N_SHIFT);
    50 + 	u64p_replace_bits((__u64 *) pmcr, pmcr_n, ARMV8_PMU_PMCR_N);
52 51  }
53 52
54 53  static uint64_t get_counters_mask(uint64_t n)