15 #include <linux/perf_event.h>
19 #define wrmsrl(msr, val) \
20 do { \
21 unsigned int _msr = (msr); \
22 u64 _val = (val); \
23 trace_printk("wrmsrl(%x, %Lx)\n", (unsigned int)(_msr), \
24 (unsigned long long)(_val)); \
25 native_write_msr((_msr), (u32)(_val), (u32)(_val >> 32)); \
26 } while (0)
72 #define MAX_PEBS_EVENTS 8
113 #define MAX_LBR_ENTRIES 16
173 #define __EVENT_CONSTRAINT(c, n, m, w, o) {\
174 { .idxmsk64 = (n) }, \
181 #define EVENT_CONSTRAINT(c, n, m) \
182 __EVENT_CONSTRAINT(c, n, m, HWEIGHT(n), 0)
205 #define EVENT_CONSTRAINT_OVERLAP(c, n, m) \
206 __EVENT_CONSTRAINT(c, n, m, HWEIGHT(n), 1)
211 #define INTEL_EVENT_CONSTRAINT(c, n) \
212 EVENT_CONSTRAINT(c, n, ARCH_PERFMON_EVENTSEL_EVENT)
225 #define FIXED_EVENT_CONSTRAINT(c, n) \
226 EVENT_CONSTRAINT(c, (1ULL << (32+n)), X86_RAW_EVENT_MASK)
231 #define INTEL_UEVENT_CONSTRAINT(c, n) \
232 EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK)
234 #define EVENT_CONSTRAINT_END \
235 EVENT_CONSTRAINT(0, 0, 0)
237 #define for_each_event_constraint(e, c) \
238 for ((e) = (c); (e)->weight; (e)++)
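/*
 * Illustrative only (not part of perf_event.h): a per-model constraint table
 * in the style the Intel code builds with the macros above. The event codes
 * and counter masks below are placeholders, not taken from a real CPU table.
 * Each entry restricts an event to the given counter bitmask; the table ends
 * with EVENT_CONSTRAINT_END so for_each_event_constraint() stops at the
 * zero-weight sentinel.
 */
static struct event_constraint example_event_constraints[] =
{
	FIXED_EVENT_CONSTRAINT(0x00c0, 0),	/* e.g. INST_RETIRED.ANY on fixed counter 0 */
	INTEL_EVENT_CONSTRAINT(0x12, 0x3),	/* placeholder: event 0x12, counters 0-1 only */
	INTEL_UEVENT_CONSTRAINT(0x01c2, 0x1),	/* placeholder: event+umask match, counter 0 only */
	EVENT_CONSTRAINT_END
};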
258 #define EVENT_EXTRA_REG(e, ms, m, vm, i) { \
259 .event = (e), \
260 .msr = (ms), \
261 .config_mask = (m), \
262 .valid_mask = (vm), \
263 .idx = EXTRA_REG_##i \
264 }
266 #define INTEL_EVENT_EXTRA_REG(event, msr, vm, idx) \
267 EVENT_EXTRA_REG(event, msr, ARCH_PERFMON_EVENTSEL_EVENT, vm, idx)
269 #define EVENT_EXTRA_END EVENT_EXTRA_REG(0, 0, 0, 0, RSP_0)
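/*
 * Illustrative only: an extra-register table in the style used for the Intel
 * offcore-response MSRs. The 0xb7/MSR_OFFCORE_RSP_0 pairing follows the
 * Nehalem-era usage, but treat the exact event code and valid mask here as an
 * example rather than a reference; the table ends with EVENT_EXTRA_END.
 */
static struct extra_reg example_extra_regs[] =
{
	INTEL_EVENT_EXTRA_REG(0xb7, MSR_OFFCORE_RSP_0, 0xffff, RSP_0),
	EVENT_EXTRA_END
};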
308 #define X86_CONFIG(args...) ((union x86_pmu_config){.bits = {args}}).value
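/*
 * Illustrative only: X86_CONFIG() assembles a raw PERFEVTSEL value from the
 * named bitfields of union x86_pmu_config, so call sites can write something
 * like the line below instead of open-coding shifts (the field values are
 * placeholders, not a documented event encoding):
 *
 *	event->hw.config = X86_CONFIG(.event=0xc0, .umask=0x01, .inv=0, .cmask=1);
 */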
405 struct perf_guest_switch_msr *(*guest_get_msrs)(int *nr);
408 #define x86_add_quirk(func_) \
409 do { \
410 static struct x86_pmu_quirk __quirk __initdata = { \
411 .func = func_, \
412 }; \
413 __quirk.next = x86_pmu.quirks; \
414 x86_pmu.quirks = &__quirk; \
415 } while (0)
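/*
 * Illustrative only: a quirk is registered from model-specific PMU init code
 * by passing a callback (the function name below is a placeholder); the macro
 * above links it onto x86_pmu.quirks, and that list is walked once during PMU
 * initialization.
 *
 *	x86_add_quirk(example_model_quirk);
 */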
417 #define ERF_NO_HT_SHARING 1
418 #define ERF_HAS_RSP_1 2
434 #define C(x) PERF_COUNT_HW_CACHE_##x
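/*
 * Illustrative only: C() shortens the generic PERF_COUNT_HW_CACHE_* enum
 * names when indexing the [cache][op][result] event tables, e.g. (the array
 * name and the 0x0151 encoding are placeholders):
 *
 *	example_hw_cache_event_ids[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = 0x0151;
 */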
447 static inline int x86_pmu_addr_offset(int index)
461 static inline unsigned int x86_pmu_config_addr(int index)
466 static inline unsigned int x86_pmu_event_addr(int index)
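/*
 * Illustrative only: the helpers above translate a counter index into the
 * model-specific control and counter MSR addresses, so callers can program
 * counters with plain wrmsrl()/rdmsrl(), e.g. to stop and then read counter
 * 'idx' (a sketch, not a quote from the kernel):
 *
 *	wrmsrl(x86_pmu_config_addr(idx), 0ULL);
 *	rdmsrl(x86_pmu_event_addr(idx), count);
 */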
477 static inline void __x86_pmu_enable_event(struct hw_perf_event *hwc, u64 enable_mask)
482 if (hwc->extra_reg.reg)
483 wrmsrl(hwc->extra_reg.reg, hwc->extra_reg.config);
484 wrmsrl(hwc->config_base, (hwc->config | enable_mask) & ~disable_mask);
490 int wmin, int wmax, int *assign);
499 wrmsrl(hwc->config_base, hwc->config);
510 static inline bool kernel_ip(unsigned long ip)
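/*
 * Note (assumption; the body is elided above): kernel_ip() simply tests
 * whether the sampled address falls in the kernel half of the address space,
 * roughly "ip > PAGE_OFFSET" on 32-bit and "(long)ip < 0" on 64-bit.
 */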
531 static inline void set_linear_ip(struct pt_regs *regs, unsigned long ip)
534 if (regs->flags & X86_VM_MASK)
535 regs->flags ^= (PERF_EFLAGS_VM | X86_VM_MASK);
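/*
 * Note (assumption; the rest of the body is elided): besides the vm86 flag
 * fix-up shown above, set_linear_ip() overwrites regs->ip with the precise
 * linear address reported by the hardware sample (PEBS/IBS).
 */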
539 #ifdef CONFIG_CPU_SUP_AMD
552 #ifdef CONFIG_CPU_SUP_INTEL