Linux Kernel 3.7.1
perf_event_intel_uncore.h
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/pci.h>
#include <linux/perf_event.h>
#include "perf_event.h"

#define UNCORE_PMU_NAME_LEN 32
#define UNCORE_PMU_HRTIMER_INTERVAL (60LL * NSEC_PER_SEC)

#define UNCORE_FIXED_EVENT 0xff
#define UNCORE_PMC_IDX_MAX_GENERIC 8
#define UNCORE_PMC_IDX_FIXED UNCORE_PMC_IDX_MAX_GENERIC
#define UNCORE_PMC_IDX_MAX (UNCORE_PMC_IDX_FIXED + 1)

#define UNCORE_EVENT_CONSTRAINT(c, n) EVENT_CONSTRAINT(c, n, 0xff)

/* SNB event control */
#define SNB_UNC_CTL_EV_SEL_MASK 0x000000ff
#define SNB_UNC_CTL_UMASK_MASK 0x0000ff00
#define SNB_UNC_CTL_EDGE_DET (1 << 18)
#define SNB_UNC_CTL_EN (1 << 22)
#define SNB_UNC_CTL_INVERT (1 << 23)
#define SNB_UNC_CTL_CMASK_MASK 0x1f000000
#define NHM_UNC_CTL_CMASK_MASK 0xff000000
#define NHM_UNC_FIXED_CTR_CTL_EN (1 << 0)

#define SNB_UNC_RAW_EVENT_MASK (SNB_UNC_CTL_EV_SEL_MASK | \
				SNB_UNC_CTL_UMASK_MASK | \
				SNB_UNC_CTL_EDGE_DET | \
				SNB_UNC_CTL_INVERT | \
				SNB_UNC_CTL_CMASK_MASK)

#define NHM_UNC_RAW_EVENT_MASK (SNB_UNC_CTL_EV_SEL_MASK | \
				SNB_UNC_CTL_UMASK_MASK | \
				SNB_UNC_CTL_EDGE_DET | \
				SNB_UNC_CTL_INVERT | \
				NHM_UNC_CTL_CMASK_MASK)

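/*
 * Illustrative sketch (not part of the original header): how an uncore
 * event-select value is composed from the fields above.  The helper name
 * and the field values passed to it are made up for demonstration.
 */
static inline u32 __snb_unc_example_ctl(u8 ev_sel, u8 umask, u8 cmask)
{
	u32 ctl = ev_sel;					/* bits  7:0  */

	ctl |= (u32)umask << 8;					/* bits 15:8  */
	ctl |= ((u32)cmask << 24) & SNB_UNC_CTL_CMASK_MASK;	/* bits 28:24 */

	/*
	 * SNB_UNC_CTL_EN (bit 22) is not part of SNB_UNC_RAW_EVENT_MASK;
	 * the driver sets it when the event is actually enabled.
	 */
	return ctl | SNB_UNC_CTL_EN;
}
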
/* SNB global control register */
#define SNB_UNC_PERF_GLOBAL_CTL 0x391
#define SNB_UNC_FIXED_CTR_CTRL 0x394
#define SNB_UNC_FIXED_CTR 0x395

/* SNB uncore global control */
#define SNB_UNC_GLOBAL_CTL_CORE_ALL ((1 << 4) - 1)
#define SNB_UNC_GLOBAL_CTL_EN (1 << 29)

/* SNB Cbo register */
#define SNB_UNC_CBO_0_PERFEVTSEL0 0x700
#define SNB_UNC_CBO_0_PER_CTR0 0x706
#define SNB_UNC_CBO_MSR_OFFSET 0x10

/* NHM global control register */
#define NHM_UNC_PERF_GLOBAL_CTL 0x391
#define NHM_UNC_FIXED_CTR 0x394
#define NHM_UNC_FIXED_CTR_CTRL 0x395

/* NHM uncore global control */
#define NHM_UNC_GLOBAL_CTL_EN_PC_ALL ((1ULL << 8) - 1)
#define NHM_UNC_GLOBAL_CTL_EN_FC (1ULL << 32)

/* NHM uncore register */
#define NHM_UNC_PERFEVTSEL0 0x3c0
#define NHM_UNC_UNCORE_PMC0 0x3b0

/* SNB-EP Box level control */
#define SNBEP_PMON_BOX_CTL_RST_CTRL (1 << 0)
#define SNBEP_PMON_BOX_CTL_RST_CTRS (1 << 1)
#define SNBEP_PMON_BOX_CTL_FRZ (1 << 8)
#define SNBEP_PMON_BOX_CTL_FRZ_EN (1 << 16)
#define SNBEP_PMON_BOX_CTL_INT (SNBEP_PMON_BOX_CTL_RST_CTRL | \
				SNBEP_PMON_BOX_CTL_RST_CTRS | \
				SNBEP_PMON_BOX_CTL_FRZ_EN)
/* SNB-EP event control */
#define SNBEP_PMON_CTL_EV_SEL_MASK 0x000000ff
#define SNBEP_PMON_CTL_UMASK_MASK 0x0000ff00
#define SNBEP_PMON_CTL_RST (1 << 17)
#define SNBEP_PMON_CTL_EDGE_DET (1 << 18)
#define SNBEP_PMON_CTL_EV_SEL_EXT (1 << 21) /* only for QPI */
#define SNBEP_PMON_CTL_EN (1 << 22)
#define SNBEP_PMON_CTL_INVERT (1 << 23)
#define SNBEP_PMON_CTL_TRESH_MASK 0xff000000
#define SNBEP_PMON_RAW_EVENT_MASK (SNBEP_PMON_CTL_EV_SEL_MASK | \
				   SNBEP_PMON_CTL_UMASK_MASK | \
				   SNBEP_PMON_CTL_EDGE_DET | \
				   SNBEP_PMON_CTL_INVERT | \
				   SNBEP_PMON_CTL_TRESH_MASK)

/* SNB-EP Ubox event control */
#define SNBEP_U_MSR_PMON_CTL_TRESH_MASK 0x1f000000
#define SNBEP_U_MSR_PMON_RAW_EVENT_MASK \
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

#define SNBEP_CBO_PMON_CTL_TID_EN (1 << 19)
#define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK (SNBEP_PMON_RAW_EVENT_MASK | \
					   SNBEP_CBO_PMON_CTL_TID_EN)

/* SNB-EP PCU event control */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK 0x0000c000
#define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK 0x1f000000
#define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT (1 << 30)
#define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET (1 << 31)
#define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK \
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

#define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK \
				(SNBEP_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)

/* SNB-EP pci control register */
#define SNBEP_PCI_PMON_BOX_CTL 0xf4
#define SNBEP_PCI_PMON_CTL0 0xd8
/* SNB-EP pci counter register */
#define SNBEP_PCI_PMON_CTR0 0xa0

/* SNB-EP home agent register */
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0 0x40
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1 0x44
#define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH 0x48
/* SNB-EP memory controller register */
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL 0xf0
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR 0xd0
/* SNB-EP QPI register */
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0 0x228
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1 0x22c
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK0 0x238
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK1 0x23c

/* SNB-EP Ubox register */
#define SNBEP_U_MSR_PMON_CTR0 0xc16
#define SNBEP_U_MSR_PMON_CTL0 0xc10

#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL 0xc08
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR 0xc09

/* SNB-EP Cbo register */
#define SNBEP_C0_MSR_PMON_CTR0 0xd16
#define SNBEP_C0_MSR_PMON_CTL0 0xd10
#define SNBEP_C0_MSR_PMON_BOX_CTL 0xd04
#define SNBEP_C0_MSR_PMON_BOX_FILTER 0xd14
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_MASK 0xfffffc1f
#define SNBEP_CBO_MSR_OFFSET 0x20

/* SNB-EP PCU register */
#define SNBEP_PCU_MSR_PMON_CTR0 0xc36
#define SNBEP_PCU_MSR_PMON_CTL0 0xc30
#define SNBEP_PCU_MSR_PMON_BOX_CTL 0xc24
#define SNBEP_PCU_MSR_PMON_BOX_FILTER 0xc34
#define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK 0xffffffff
#define SNBEP_PCU_MSR_CORE_C3_CTR 0x3fc
#define SNBEP_PCU_MSR_CORE_C6_CTR 0x3fd

/* NHM-EX event control */
#define NHMEX_PMON_CTL_EV_SEL_MASK 0x000000ff
#define NHMEX_PMON_CTL_UMASK_MASK 0x0000ff00
#define NHMEX_PMON_CTL_EN_BIT0 (1 << 0)
#define NHMEX_PMON_CTL_EDGE_DET (1 << 18)
#define NHMEX_PMON_CTL_PMI_EN (1 << 20)
#define NHMEX_PMON_CTL_EN_BIT22 (1 << 22)
#define NHMEX_PMON_CTL_INVERT (1 << 23)
#define NHMEX_PMON_CTL_TRESH_MASK 0xff000000
#define NHMEX_PMON_RAW_EVENT_MASK (NHMEX_PMON_CTL_EV_SEL_MASK | \
				   NHMEX_PMON_CTL_UMASK_MASK | \
				   NHMEX_PMON_CTL_EDGE_DET | \
				   NHMEX_PMON_CTL_INVERT | \
				   NHMEX_PMON_CTL_TRESH_MASK)

/* NHM-EX Ubox */
#define NHMEX_U_MSR_PMON_GLOBAL_CTL 0xc00
#define NHMEX_U_MSR_PMON_CTR 0xc11
#define NHMEX_U_MSR_PMON_EV_SEL 0xc10

#define NHMEX_U_PMON_GLOBAL_EN (1 << 0)
#define NHMEX_U_PMON_GLOBAL_PMI_CORE_SEL 0x0000001e
#define NHMEX_U_PMON_GLOBAL_EN_ALL (1 << 28)
#define NHMEX_U_PMON_GLOBAL_RST_ALL (1 << 29)
#define NHMEX_U_PMON_GLOBAL_FRZ_ALL (1 << 31)

#define NHMEX_U_PMON_RAW_EVENT_MASK \
		(NHMEX_PMON_CTL_EV_SEL_MASK | \
		 NHMEX_PMON_CTL_EDGE_DET)

/* NHM-EX Cbox */
#define NHMEX_C0_MSR_PMON_GLOBAL_CTL 0xd00
#define NHMEX_C0_MSR_PMON_CTR0 0xd11
#define NHMEX_C0_MSR_PMON_EV_SEL0 0xd10
#define NHMEX_C_MSR_OFFSET 0x20

/* NHM-EX Bbox */
#define NHMEX_B0_MSR_PMON_GLOBAL_CTL 0xc20
#define NHMEX_B0_MSR_PMON_CTR0 0xc31
#define NHMEX_B0_MSR_PMON_CTL0 0xc30
#define NHMEX_B_MSR_OFFSET 0x40
#define NHMEX_B0_MSR_MATCH 0xe45
#define NHMEX_B0_MSR_MASK 0xe46
#define NHMEX_B1_MSR_MATCH 0xe4d
#define NHMEX_B1_MSR_MASK 0xe4e

#define NHMEX_B_PMON_CTL_EN (1 << 0)
#define NHMEX_B_PMON_CTL_EV_SEL_SHIFT 1
#define NHMEX_B_PMON_CTL_EV_SEL_MASK \
		(0x1f << NHMEX_B_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_B_PMON_CTR_SHIFT 6
#define NHMEX_B_PMON_CTR_MASK \
		(0x3 << NHMEX_B_PMON_CTR_SHIFT)
#define NHMEX_B_PMON_RAW_EVENT_MASK \
		(NHMEX_B_PMON_CTL_EV_SEL_MASK | \
		 NHMEX_B_PMON_CTR_MASK)

/* NHM-EX Sbox */
#define NHMEX_S0_MSR_PMON_GLOBAL_CTL 0xc40
#define NHMEX_S0_MSR_PMON_CTR0 0xc51
#define NHMEX_S0_MSR_PMON_CTL0 0xc50
#define NHMEX_S_MSR_OFFSET 0x80
#define NHMEX_S0_MSR_MM_CFG 0xe48
#define NHMEX_S0_MSR_MATCH 0xe49
#define NHMEX_S0_MSR_MASK 0xe4a
#define NHMEX_S1_MSR_MM_CFG 0xe58
#define NHMEX_S1_MSR_MATCH 0xe59
#define NHMEX_S1_MSR_MASK 0xe5a

#define NHMEX_S_PMON_MM_CFG_EN (0x1ULL << 63)
#define NHMEX_S_EVENT_TO_R_PROG_EV 0

/* NHM-EX Mbox */
#define NHMEX_M0_MSR_GLOBAL_CTL 0xca0
#define NHMEX_M0_MSR_PMU_DSP 0xca5
#define NHMEX_M0_MSR_PMU_ISS 0xca6
#define NHMEX_M0_MSR_PMU_MAP 0xca7
#define NHMEX_M0_MSR_PMU_MSC_THR 0xca8
#define NHMEX_M0_MSR_PMU_PGT 0xca9
#define NHMEX_M0_MSR_PMU_PLD 0xcaa
#define NHMEX_M0_MSR_PMU_ZDP_CTL_FVC 0xcab
#define NHMEX_M0_MSR_PMU_CTL0 0xcb0
#define NHMEX_M0_MSR_PMU_CNT0 0xcb1
#define NHMEX_M_MSR_OFFSET 0x40
#define NHMEX_M0_MSR_PMU_MM_CFG 0xe54
#define NHMEX_M1_MSR_PMU_MM_CFG 0xe5c

#define NHMEX_M_PMON_MM_CFG_EN (1ULL << 63)
#define NHMEX_M_PMON_ADDR_MATCH_MASK 0x3ffffffffULL
#define NHMEX_M_PMON_ADDR_MASK_MASK 0x7ffffffULL
#define NHMEX_M_PMON_ADDR_MASK_SHIFT 34

#define NHMEX_M_PMON_CTL_EN (1 << 0)
#define NHMEX_M_PMON_CTL_PMI_EN (1 << 1)
#define NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT 2
#define NHMEX_M_PMON_CTL_COUNT_MODE_MASK \
	(0x3 << NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT 4
#define NHMEX_M_PMON_CTL_STORAGE_MODE_MASK \
	(0x3 << NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_WRAP_MODE (1 << 6)
#define NHMEX_M_PMON_CTL_FLAG_MODE (1 << 7)
#define NHMEX_M_PMON_CTL_INC_SEL_SHIFT 9
#define NHMEX_M_PMON_CTL_INC_SEL_MASK \
	(0x1f << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT 19
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK \
	(0x7 << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT)
#define NHMEX_M_PMON_RAW_EVENT_MASK \
	(NHMEX_M_PMON_CTL_COUNT_MODE_MASK | \
	 NHMEX_M_PMON_CTL_STORAGE_MODE_MASK | \
	 NHMEX_M_PMON_CTL_WRAP_MODE | \
	 NHMEX_M_PMON_CTL_FLAG_MODE | \
	 NHMEX_M_PMON_CTL_INC_SEL_MASK | \
	 NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK)

#define NHMEX_M_PMON_ZDP_CTL_FVC_MASK (((1 << 11) - 1) | (1 << 23))
#define NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n) (0x7 << (11 + 3 * (n)))

#define WSMEX_M_PMON_ZDP_CTL_FVC_MASK (((1 << 12) - 1) | (1 << 24))
#define WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n) (0x7 << (12 + 3 * (n)))

/*
 * Use bits 9~13 to select the event if the flag-mode bit (bit 7) is not
 * set; otherwise use bits 19~21 to select the event.
 */
#define MBOX_INC_SEL(x) ((x) << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define MBOX_SET_FLAG_SEL(x) (((x) << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT) | \
			      NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_INC_SEL_MASK (NHMEX_M_PMON_CTL_INC_SEL_MASK | \
			   NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_SET_FLAG_SEL_MASK (NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK | \
				NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_INC_SEL_EXTAR_REG(c, r) \
		EVENT_EXTRA_REG(MBOX_INC_SEL(c), NHMEX_M0_MSR_PMU_##r, \
				MBOX_INC_SEL_MASK, (u64)-1, NHMEX_M_##r)
#define MBOX_SET_FLAG_SEL_EXTRA_REG(c, r) \
		EVENT_EXTRA_REG(MBOX_SET_FLAG_SEL(c), NHMEX_M0_MSR_PMU_##r, \
				MBOX_SET_FLAG_SEL_MASK, \
				(u64)-1, NHMEX_M_##r)
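
/*
 * Illustrative sketch (not part of the original header): decoding which of
 * the two Mbox encodings a given control value uses.  The helper name is
 * made up for demonstration; the real decoding lives in
 * perf_event_intel_uncore.c.
 */
static inline unsigned int __nhmex_mbox_example_event_sel(u64 ctl)
{
	if (ctl & NHMEX_M_PMON_CTL_FLAG_MODE)	/* bit 7 set: bits 19~21 */
		return (ctl & NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK) >>
			NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT;

	/* bit 7 clear: bits 9~13 */
	return (ctl & NHMEX_M_PMON_CTL_INC_SEL_MASK) >>
		NHMEX_M_PMON_CTL_INC_SEL_SHIFT;
}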

/* NHM-EX Rbox */
#define NHMEX_R_MSR_GLOBAL_CTL 0xe00
#define NHMEX_R_MSR_PMON_CTL0 0xe10
#define NHMEX_R_MSR_PMON_CNT0 0xe11
#define NHMEX_R_MSR_OFFSET 0x20

#define NHMEX_R_MSR_PORTN_QLX_CFG(n) \
		((n) < 4 ? (0xe0c + (n)) : (0xe2c + (n) - 4))
#define NHMEX_R_MSR_PORTN_IPERF_CFG0(n) (0xe04 + (n))
#define NHMEX_R_MSR_PORTN_IPERF_CFG1(n) (0xe24 + (n))
#define NHMEX_R_MSR_PORTN_XBR_OFFSET(n) \
		(((n) < 4 ? 0 : 0x10) + (n) * 4)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) \
		(0xe60 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET1_MATCH(n) \
		(NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MASK(n) \
		(NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 2)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) \
		(0xe70 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET2_MATCH(n) \
		(NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MASK(n) \
		(NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 2)

#define NHMEX_R_PMON_CTL_EN (1 << 0)
#define NHMEX_R_PMON_CTL_EV_SEL_SHIFT 1
#define NHMEX_R_PMON_CTL_EV_SEL_MASK \
		(0x1f << NHMEX_R_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_R_PMON_CTL_PMI_EN (1 << 6)
#define NHMEX_R_PMON_RAW_EVENT_MASK NHMEX_R_PMON_CTL_EV_SEL_MASK

/* NHM-EX Wbox */
#define NHMEX_W_MSR_GLOBAL_CTL 0xc80
#define NHMEX_W_MSR_PMON_CNT0 0xc90
#define NHMEX_W_MSR_PMON_EVT_SEL0 0xc91
#define NHMEX_W_MSR_PMON_FIXED_CTR 0x394
#define NHMEX_W_MSR_PMON_FIXED_CTL 0x395

#define NHMEX_W_PMON_GLOBAL_FIXED_EN (1ULL << 31)

struct intel_uncore_ops;
struct intel_uncore_pmu;
struct intel_uncore_box;
struct uncore_event_desc;

struct intel_uncore_type {
	const char *name;
	int num_counters;
	int num_boxes;
	int perf_ctr_bits;
	int fixed_ctr_bits;
	unsigned perf_ctr;
	unsigned event_ctl;
	unsigned event_mask;
	unsigned fixed_ctr;
	unsigned fixed_ctl;
	unsigned box_ctl;
	unsigned msr_offset;
	unsigned num_shared_regs:8;
	unsigned single_fixed:1;
	unsigned pair_ctr_ctl:1;
	unsigned *msr_offsets;
	struct event_constraint unconstrainted;
	struct event_constraint *constraints;
	struct intel_uncore_pmu *pmus;
	struct intel_uncore_ops *ops;
	struct uncore_event_desc *event_descs;
	const struct attribute_group *attr_groups[4];
};

#define pmu_group attr_groups[0]
#define format_group attr_groups[1]
#define events_group attr_groups[2]

struct intel_uncore_ops {
	void (*init_box)(struct intel_uncore_box *);
	void (*disable_box)(struct intel_uncore_box *);
	void (*enable_box)(struct intel_uncore_box *);
	void (*disable_event)(struct intel_uncore_box *, struct perf_event *);
	void (*enable_event)(struct intel_uncore_box *, struct perf_event *);
	u64 (*read_counter)(struct intel_uncore_box *, struct perf_event *);
	int (*hw_config)(struct intel_uncore_box *, struct perf_event *);
	struct event_constraint *(*get_constraint)(struct intel_uncore_box *,
						   struct perf_event *);
	void (*put_constraint)(struct intel_uncore_box *, struct perf_event *);
};

struct intel_uncore_pmu {
	struct pmu pmu;
	char name[UNCORE_PMU_NAME_LEN];
	int pmu_idx;
	int func_id;
	struct intel_uncore_type *type;
	struct intel_uncore_box ** __percpu box;
	struct list_head box_list;
};

struct intel_uncore_extra_reg {
	raw_spinlock_t lock;
	u64 config, config1, config2;
	atomic_t ref;
};

struct intel_uncore_box {
	int phys_id;
	int n_active;	/* number of active events */
	int n_events;
	int cpu;	/* cpu to collect events */
	unsigned long flags;
	atomic_t refcnt;
	struct perf_event *events[UNCORE_PMC_IDX_MAX];
	struct perf_event *event_list[UNCORE_PMC_IDX_MAX];
	unsigned long active_mask[BITS_TO_LONGS(UNCORE_PMC_IDX_MAX)];
	u64 tags[UNCORE_PMC_IDX_MAX];
	struct pci_dev *pci_dev;
	struct intel_uncore_pmu *pmu;
	struct hrtimer hrtimer;
	struct list_head list;
	struct intel_uncore_extra_reg shared_regs[0];
};

#define UNCORE_BOX_FLAG_INITIATED 0

struct uncore_event_desc {
	struct kobj_attribute attr;
	const char *config;
};

#define INTEL_UNCORE_EVENT_DESC(_name, _config) \
{ \
	.attr = __ATTR(_name, 0444, uncore_event_show, NULL), \
	.config = _config, \
}

#define DEFINE_UNCORE_FORMAT_ATTR(_var, _name, _format) \
static ssize_t __uncore_##_var##_show(struct kobject *kobj, \
				      struct kobj_attribute *attr, \
				      char *page) \
{ \
	BUILD_BUG_ON(sizeof(_format) >= PAGE_SIZE); \
	return sprintf(page, _format "\n"); \
} \
static struct kobj_attribute format_attr_##_var = \
	__ATTR(_name, 0444, __uncore_##_var##_show, NULL)

static ssize_t uncore_event_show(struct kobject *kobj,
				 struct kobj_attribute *attr, char *buf)
{
	struct uncore_event_desc *event =
		container_of(attr, struct uncore_event_desc, attr);
	return sprintf(buf, "%s", event->config);
}

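/*
 * Illustrative usage (not part of the original header), in the style of the
 * companion perf_event_intel_uncore.c: one sysfs "format" attribute and a
 * small event-description table.  The names, the config:0-7 field range and
 * the clockticks encoding are examples only.
 */
DEFINE_UNCORE_FORMAT_ATTR(example_event, event, "config:0-7");

static struct uncore_event_desc __example_uncore_events[] __maybe_unused = {
	INTEL_UNCORE_EVENT_DESC(clockticks, "event=0xff"),
	{ /* end: all zeroes */ },
};
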
static inline unsigned uncore_pci_box_ctl(struct intel_uncore_box *box)
{
	return box->pmu->type->box_ctl;
}

static inline unsigned uncore_pci_fixed_ctl(struct intel_uncore_box *box)
{
	return box->pmu->type->fixed_ctl;
}

static inline unsigned uncore_pci_fixed_ctr(struct intel_uncore_box *box)
{
	return box->pmu->type->fixed_ctr;
}

static inline
unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx)
{
	return idx * 4 + box->pmu->type->event_ctl;
}

static inline
unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx)
{
	return idx * 8 + box->pmu->type->perf_ctr;
}

static inline unsigned uncore_msr_box_offset(struct intel_uncore_box *box)
{
	struct intel_uncore_pmu *pmu = box->pmu;
	return pmu->type->msr_offsets ?
		pmu->type->msr_offsets[pmu->pmu_idx] :
		pmu->type->msr_offset * pmu->pmu_idx;
}

static inline unsigned uncore_msr_box_ctl(struct intel_uncore_box *box)
{
	if (!box->pmu->type->box_ctl)
		return 0;
	return box->pmu->type->box_ctl + uncore_msr_box_offset(box);
}

static inline unsigned uncore_msr_fixed_ctl(struct intel_uncore_box *box)
{
	if (!box->pmu->type->fixed_ctl)
		return 0;
	return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box);
}

static inline unsigned uncore_msr_fixed_ctr(struct intel_uncore_box *box)
{
	return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box);
}

static inline
unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx)
{
	return box->pmu->type->event_ctl +
		(box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) +
		uncore_msr_box_offset(box);
}

static inline
unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx)
{
	return box->pmu->type->perf_ctr +
		(box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) +
		uncore_msr_box_offset(box);
}

static inline
unsigned uncore_fixed_ctl(struct intel_uncore_box *box)
{
	if (box->pci_dev)
		return uncore_pci_fixed_ctl(box);
	else
		return uncore_msr_fixed_ctl(box);
}

static inline
unsigned uncore_fixed_ctr(struct intel_uncore_box *box)
{
	if (box->pci_dev)
		return uncore_pci_fixed_ctr(box);
	else
		return uncore_msr_fixed_ctr(box);
}

static inline
unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx)
{
	if (box->pci_dev)
		return uncore_pci_event_ctl(box, idx);
	else
		return uncore_msr_event_ctl(box, idx);
}

static inline
unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx)
{
	if (box->pci_dev)
		return uncore_pci_perf_ctr(box, idx);
	else
		return uncore_msr_perf_ctr(box, idx);
}

static inline int uncore_perf_ctr_bits(struct intel_uncore_box *box)
{
	return box->pmu->type->perf_ctr_bits;
}

static inline int uncore_fixed_ctr_bits(struct intel_uncore_box *box)
{
	return box->pmu->type->fixed_ctr_bits;
}

static inline int uncore_num_counters(struct intel_uncore_box *box)
{
	return box->pmu->type->num_counters;
}

static inline void uncore_disable_box(struct intel_uncore_box *box)
{
	if (box->pmu->type->ops->disable_box)
		box->pmu->type->ops->disable_box(box);
}

static inline void uncore_enable_box(struct intel_uncore_box *box)
{
	if (box->pmu->type->ops->enable_box)
		box->pmu->type->ops->enable_box(box);
}

static inline void uncore_disable_event(struct intel_uncore_box *box,
					struct perf_event *event)
{
	box->pmu->type->ops->disable_event(box, event);
}

static inline void uncore_enable_event(struct intel_uncore_box *box,
				       struct perf_event *event)
{
	box->pmu->type->ops->enable_event(box, event);
}

static inline u64 uncore_read_counter(struct intel_uncore_box *box,
				      struct perf_event *event)
{
	return box->pmu->type->ops->read_counter(box, event);
}

static inline void uncore_box_init(struct intel_uncore_box *box)
{
	if (!test_and_set_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) {
		if (box->pmu->type->ops->init_box)
			box->pmu->type->ops->init_box(box);
	}
}

static inline bool uncore_box_is_fake(struct intel_uncore_box *box)
{
	return (box->phys_id < 0);
}
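
/*
 * Illustrative sketch (not part of the original header): what an MSR-based
 * enable_event/read_counter pair of an intel_uncore_ops instance might look
 * like, wired to the accessor helpers above.  The function names are made
 * up; the real implementations live in perf_event_intel_uncore.c.
 */
static inline void __example_msr_enable_event(struct intel_uncore_box *box,
					      struct perf_event *event)
{
	/* program the event-select register for this counter index */
	wrmsrl(uncore_event_ctl(box, event->hw.idx),
	       event->hw.config | SNBEP_PMON_CTL_EN);
}

static inline u64 __example_msr_read_counter(struct intel_uncore_box *box,
					     struct perf_event *event)
{
	u64 count;

	/* read the matching counter register */
	rdmsrl(uncore_perf_ctr(box, event->hw.idx), count);
	return count;
}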