1 /* SandyBridge-EP/IvyTown uncore support */
2 #include "perf_event_intel_uncore.h"
5 /* SNB-EP Box level control */
6 #define SNBEP_PMON_BOX_CTL_RST_CTRL (1 << 0)
7 #define SNBEP_PMON_BOX_CTL_RST_CTRS (1 << 1)
8 #define SNBEP_PMON_BOX_CTL_FRZ (1 << 8)
9 #define SNBEP_PMON_BOX_CTL_FRZ_EN (1 << 16)
10 #define SNBEP_PMON_BOX_CTL_INT (SNBEP_PMON_BOX_CTL_RST_CTRL | \
11 SNBEP_PMON_BOX_CTL_RST_CTRS | \
12 SNBEP_PMON_BOX_CTL_FRZ_EN)
13 /* SNB-EP event control */
14 #define SNBEP_PMON_CTL_EV_SEL_MASK 0x000000ff
15 #define SNBEP_PMON_CTL_UMASK_MASK 0x0000ff00
16 #define SNBEP_PMON_CTL_RST (1 << 17)
17 #define SNBEP_PMON_CTL_EDGE_DET (1 << 18)
18 #define SNBEP_PMON_CTL_EV_SEL_EXT (1 << 21)
19 #define SNBEP_PMON_CTL_EN (1 << 22)
20 #define SNBEP_PMON_CTL_INVERT (1 << 23)
21 #define SNBEP_PMON_CTL_TRESH_MASK 0xff000000
22 #define SNBEP_PMON_RAW_EVENT_MASK (SNBEP_PMON_CTL_EV_SEL_MASK | \
23 SNBEP_PMON_CTL_UMASK_MASK | \
24 SNBEP_PMON_CTL_EDGE_DET | \
25 SNBEP_PMON_CTL_INVERT | \
26 SNBEP_PMON_CTL_TRESH_MASK)
28 /* SNB-EP Ubox event control */
29 #define SNBEP_U_MSR_PMON_CTL_TRESH_MASK 0x1f000000
30 #define SNBEP_U_MSR_PMON_RAW_EVENT_MASK \
31 (SNBEP_PMON_CTL_EV_SEL_MASK | \
32 SNBEP_PMON_CTL_UMASK_MASK | \
33 SNBEP_PMON_CTL_EDGE_DET | \
34 SNBEP_PMON_CTL_INVERT | \
35 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)
37 #define SNBEP_CBO_PMON_CTL_TID_EN (1 << 19)
38 #define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK (SNBEP_PMON_RAW_EVENT_MASK | \
39 SNBEP_CBO_PMON_CTL_TID_EN)
41 /* SNB-EP PCU event control */
42 #define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK 0x0000c000
43 #define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK 0x1f000000
44 #define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT (1 << 30)
45 #define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET (1 << 31)
46 #define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK \
47 (SNBEP_PMON_CTL_EV_SEL_MASK | \
48 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
49 SNBEP_PMON_CTL_EDGE_DET | \
50 SNBEP_PMON_CTL_EV_SEL_EXT | \
51 SNBEP_PMON_CTL_INVERT | \
52 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
53 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
54 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
56 #define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK \
57 (SNBEP_PMON_RAW_EVENT_MASK | \
58 SNBEP_PMON_CTL_EV_SEL_EXT)
60 /* SNB-EP pci control register */
61 #define SNBEP_PCI_PMON_BOX_CTL 0xf4
62 #define SNBEP_PCI_PMON_CTL0 0xd8
63 /* SNB-EP pci counter register */
64 #define SNBEP_PCI_PMON_CTR0 0xa0
66 /* SNB-EP home agent register */
67 #define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0 0x40
68 #define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1 0x44
69 #define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH 0x48
70 /* SNB-EP memory controller register */
71 #define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL 0xf0
72 #define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR 0xd0
73 /* SNB-EP QPI register */
74 #define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0 0x228
75 #define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1 0x22c
76 #define SNBEP_Q_Py_PCI_PMON_PKT_MASK0 0x238
77 #define SNBEP_Q_Py_PCI_PMON_PKT_MASK1 0x23c
79 /* SNB-EP Ubox register */
80 #define SNBEP_U_MSR_PMON_CTR0 0xc16
81 #define SNBEP_U_MSR_PMON_CTL0 0xc10
83 #define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL 0xc08
84 #define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR 0xc09
86 /* SNB-EP Cbo register */
87 #define SNBEP_C0_MSR_PMON_CTR0 0xd16
88 #define SNBEP_C0_MSR_PMON_CTL0 0xd10
89 #define SNBEP_C0_MSR_PMON_BOX_CTL 0xd04
90 #define SNBEP_C0_MSR_PMON_BOX_FILTER 0xd14
91 #define SNBEP_CBO_MSR_OFFSET 0x20
93 #define SNBEP_CB0_MSR_PMON_BOX_FILTER_TID 0x1f
94 #define SNBEP_CB0_MSR_PMON_BOX_FILTER_NID 0x3fc00
95 #define SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE 0x7c0000
96 #define SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC 0xff800000
98 #define SNBEP_CBO_EVENT_EXTRA_REG(e, m, i) { \
100 .msr = SNBEP_C0_MSR_PMON_BOX_FILTER, \
101 .config_mask = (m), \
105 /* SNB-EP PCU register */
106 #define SNBEP_PCU_MSR_PMON_CTR0 0xc36
107 #define SNBEP_PCU_MSR_PMON_CTL0 0xc30
108 #define SNBEP_PCU_MSR_PMON_BOX_CTL 0xc24
109 #define SNBEP_PCU_MSR_PMON_BOX_FILTER 0xc34
110 #define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK 0xffffffff
111 #define SNBEP_PCU_MSR_CORE_C3_CTR 0x3fc
112 #define SNBEP_PCU_MSR_CORE_C6_CTR 0x3fd
114 /* IVBEP event control */
115 #define IVBEP_PMON_BOX_CTL_INT (SNBEP_PMON_BOX_CTL_RST_CTRL | \
116 SNBEP_PMON_BOX_CTL_RST_CTRS)
117 #define IVBEP_PMON_RAW_EVENT_MASK (SNBEP_PMON_CTL_EV_SEL_MASK | \
118 SNBEP_PMON_CTL_UMASK_MASK | \
119 SNBEP_PMON_CTL_EDGE_DET | \
120 SNBEP_PMON_CTL_TRESH_MASK)
122 #define IVBEP_U_MSR_PMON_GLOBAL_CTL 0xc00
123 #define IVBEP_U_PMON_GLOBAL_FRZ_ALL (1 << 31)
124 #define IVBEP_U_PMON_GLOBAL_UNFRZ_ALL (1 << 29)
126 #define IVBEP_U_MSR_PMON_RAW_EVENT_MASK \
127 (SNBEP_PMON_CTL_EV_SEL_MASK | \
128 SNBEP_PMON_CTL_UMASK_MASK | \
129 SNBEP_PMON_CTL_EDGE_DET | \
130 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)
132 #define IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK (IVBEP_PMON_RAW_EVENT_MASK | \
133 SNBEP_CBO_PMON_CTL_TID_EN)
135 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_TID (0x1fULL << 0)
136 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK (0xfULL << 5)
137 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE (0x3fULL << 17)
138 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_NID (0xffffULL << 32)
139 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC (0x1ffULL << 52)
140 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_C6 (0x1ULL << 61)
141 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_NC (0x1ULL << 62)
142 #define IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC (0x1ULL << 63)
144 /* IVBEP home agent */
145 #define IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST (1 << 16)
146 #define IVBEP_HA_PCI_PMON_RAW_EVENT_MASK \
147 (IVBEP_PMON_RAW_EVENT_MASK | \
148 IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST)
150 #define IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK \
151 (SNBEP_PMON_CTL_EV_SEL_MASK | \
152 SNBEP_PMON_CTL_EV_SEL_EXT | \
153 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
154 SNBEP_PMON_CTL_EDGE_DET | \
155 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
156 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
157 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
159 #define IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK \
160 (IVBEP_PMON_RAW_EVENT_MASK | \
161 SNBEP_PMON_CTL_EV_SEL_EXT)
163 #define __BITS_VALUE(x, i, n) ((typeof(x))(((x) >> ((i) * (n))) & \
164 ((1ULL << (n)) - 1)))
166 /* Haswell-EP Ubox */
167 #define HSWEP_U_MSR_PMON_CTR0 0x709
168 #define HSWEP_U_MSR_PMON_CTL0 0x705
169 #define HSWEP_U_MSR_PMON_FILTER 0x707
171 #define HSWEP_U_MSR_PMON_UCLK_FIXED_CTL 0x703
172 #define HSWEP_U_MSR_PMON_UCLK_FIXED_CTR 0x704
174 #define HSWEP_U_MSR_PMON_BOX_FILTER_TID (0x1 << 0)
175 #define HSWEP_U_MSR_PMON_BOX_FILTER_CID (0x1fULL << 1)
176 #define HSWEP_U_MSR_PMON_BOX_FILTER_MASK \
177 (HSWEP_U_MSR_PMON_BOX_FILTER_TID | \
178 HSWEP_U_MSR_PMON_BOX_FILTER_CID)
181 #define HSWEP_C0_MSR_PMON_CTR0 0xe08
182 #define HSWEP_C0_MSR_PMON_CTL0 0xe01
183 #define HSWEP_C0_MSR_PMON_BOX_CTL 0xe00
184 #define HSWEP_C0_MSR_PMON_BOX_FILTER0 0xe05
185 #define HSWEP_CBO_MSR_OFFSET 0x10
188 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_TID (0x3fULL << 0)
189 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK (0xfULL << 6)
190 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE (0x7fULL << 17)
191 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_NID (0xffffULL << 32)
192 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC (0x1ffULL << 52)
193 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_C6 (0x1ULL << 61)
194 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_NC (0x1ULL << 62)
195 #define HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC (0x1ULL << 63)
198 /* Haswell-EP Sbox */
199 #define HSWEP_S0_MSR_PMON_CTR0 0x726
200 #define HSWEP_S0_MSR_PMON_CTL0 0x721
201 #define HSWEP_S0_MSR_PMON_BOX_CTL 0x720
202 #define HSWEP_SBOX_MSR_OFFSET 0xa
203 #define HSWEP_S_MSR_PMON_RAW_EVENT_MASK (SNBEP_PMON_RAW_EVENT_MASK | \
204 SNBEP_CBO_PMON_CTL_TID_EN)
207 #define HSWEP_PCU_MSR_PMON_CTR0 0x717
208 #define HSWEP_PCU_MSR_PMON_CTL0 0x711
209 #define HSWEP_PCU_MSR_PMON_BOX_CTL 0x710
210 #define HSWEP_PCU_MSR_PMON_BOX_FILTER 0x715
213 DEFINE_UNCORE_FORMAT_ATTR(event
, event
, "config:0-7");
214 DEFINE_UNCORE_FORMAT_ATTR(event_ext
, event
, "config:0-7,21");
215 DEFINE_UNCORE_FORMAT_ATTR(umask
, umask
, "config:8-15");
216 DEFINE_UNCORE_FORMAT_ATTR(edge
, edge
, "config:18");
217 DEFINE_UNCORE_FORMAT_ATTR(tid_en
, tid_en
, "config:19");
218 DEFINE_UNCORE_FORMAT_ATTR(inv
, inv
, "config:23");
219 DEFINE_UNCORE_FORMAT_ATTR(thresh8
, thresh
, "config:24-31");
220 DEFINE_UNCORE_FORMAT_ATTR(thresh5
, thresh
, "config:24-28");
221 DEFINE_UNCORE_FORMAT_ATTR(occ_sel
, occ_sel
, "config:14-15");
222 DEFINE_UNCORE_FORMAT_ATTR(occ_invert
, occ_invert
, "config:30");
223 DEFINE_UNCORE_FORMAT_ATTR(occ_edge
, occ_edge
, "config:14-51");
224 DEFINE_UNCORE_FORMAT_ATTR(filter_tid
, filter_tid
, "config1:0-4");
225 DEFINE_UNCORE_FORMAT_ATTR(filter_tid2
, filter_tid
, "config1:0");
226 DEFINE_UNCORE_FORMAT_ATTR(filter_tid3
, filter_tid
, "config1:0-5");
227 DEFINE_UNCORE_FORMAT_ATTR(filter_cid
, filter_cid
, "config1:5");
228 DEFINE_UNCORE_FORMAT_ATTR(filter_link
, filter_link
, "config1:5-8");
229 DEFINE_UNCORE_FORMAT_ATTR(filter_link2
, filter_link
, "config1:6-8");
230 DEFINE_UNCORE_FORMAT_ATTR(filter_nid
, filter_nid
, "config1:10-17");
231 DEFINE_UNCORE_FORMAT_ATTR(filter_nid2
, filter_nid
, "config1:32-47");
232 DEFINE_UNCORE_FORMAT_ATTR(filter_state
, filter_state
, "config1:18-22");
233 DEFINE_UNCORE_FORMAT_ATTR(filter_state2
, filter_state
, "config1:17-22");
234 DEFINE_UNCORE_FORMAT_ATTR(filter_state3
, filter_state
, "config1:17-23");
235 DEFINE_UNCORE_FORMAT_ATTR(filter_opc
, filter_opc
, "config1:23-31");
236 DEFINE_UNCORE_FORMAT_ATTR(filter_opc2
, filter_opc
, "config1:52-60");
237 DEFINE_UNCORE_FORMAT_ATTR(filter_nc
, filter_nc
, "config1:62");
238 DEFINE_UNCORE_FORMAT_ATTR(filter_c6
, filter_c6
, "config1:61");
239 DEFINE_UNCORE_FORMAT_ATTR(filter_isoc
, filter_isoc
, "config1:63");
240 DEFINE_UNCORE_FORMAT_ATTR(filter_band0
, filter_band0
, "config1:0-7");
241 DEFINE_UNCORE_FORMAT_ATTR(filter_band1
, filter_band1
, "config1:8-15");
242 DEFINE_UNCORE_FORMAT_ATTR(filter_band2
, filter_band2
, "config1:16-23");
243 DEFINE_UNCORE_FORMAT_ATTR(filter_band3
, filter_band3
, "config1:24-31");
244 DEFINE_UNCORE_FORMAT_ATTR(match_rds
, match_rds
, "config1:48-51");
245 DEFINE_UNCORE_FORMAT_ATTR(match_rnid30
, match_rnid30
, "config1:32-35");
246 DEFINE_UNCORE_FORMAT_ATTR(match_rnid4
, match_rnid4
, "config1:31");
247 DEFINE_UNCORE_FORMAT_ATTR(match_dnid
, match_dnid
, "config1:13-17");
248 DEFINE_UNCORE_FORMAT_ATTR(match_mc
, match_mc
, "config1:9-12");
249 DEFINE_UNCORE_FORMAT_ATTR(match_opc
, match_opc
, "config1:5-8");
250 DEFINE_UNCORE_FORMAT_ATTR(match_vnw
, match_vnw
, "config1:3-4");
251 DEFINE_UNCORE_FORMAT_ATTR(match0
, match0
, "config1:0-31");
252 DEFINE_UNCORE_FORMAT_ATTR(match1
, match1
, "config1:32-63");
253 DEFINE_UNCORE_FORMAT_ATTR(mask_rds
, mask_rds
, "config2:48-51");
254 DEFINE_UNCORE_FORMAT_ATTR(mask_rnid30
, mask_rnid30
, "config2:32-35");
255 DEFINE_UNCORE_FORMAT_ATTR(mask_rnid4
, mask_rnid4
, "config2:31");
256 DEFINE_UNCORE_FORMAT_ATTR(mask_dnid
, mask_dnid
, "config2:13-17");
257 DEFINE_UNCORE_FORMAT_ATTR(mask_mc
, mask_mc
, "config2:9-12");
258 DEFINE_UNCORE_FORMAT_ATTR(mask_opc
, mask_opc
, "config2:5-8");
259 DEFINE_UNCORE_FORMAT_ATTR(mask_vnw
, mask_vnw
, "config2:3-4");
260 DEFINE_UNCORE_FORMAT_ATTR(mask0
, mask0
, "config2:0-31");
261 DEFINE_UNCORE_FORMAT_ATTR(mask1
, mask1
, "config2:32-63");
263 static void snbep_uncore_pci_disable_box(struct intel_uncore_box
*box
)
265 struct pci_dev
*pdev
= box
->pci_dev
;
266 int box_ctl
= uncore_pci_box_ctl(box
);
269 if (!pci_read_config_dword(pdev
, box_ctl
, &config
)) {
270 config
|= SNBEP_PMON_BOX_CTL_FRZ
;
271 pci_write_config_dword(pdev
, box_ctl
, config
);
275 static void snbep_uncore_pci_enable_box(struct intel_uncore_box
*box
)
277 struct pci_dev
*pdev
= box
->pci_dev
;
278 int box_ctl
= uncore_pci_box_ctl(box
);
281 if (!pci_read_config_dword(pdev
, box_ctl
, &config
)) {
282 config
&= ~SNBEP_PMON_BOX_CTL_FRZ
;
283 pci_write_config_dword(pdev
, box_ctl
, config
);
287 static void snbep_uncore_pci_enable_event(struct intel_uncore_box
*box
, struct perf_event
*event
)
289 struct pci_dev
*pdev
= box
->pci_dev
;
290 struct hw_perf_event
*hwc
= &event
->hw
;
292 pci_write_config_dword(pdev
, hwc
->config_base
, hwc
->config
| SNBEP_PMON_CTL_EN
);
295 static void snbep_uncore_pci_disable_event(struct intel_uncore_box
*box
, struct perf_event
*event
)
297 struct pci_dev
*pdev
= box
->pci_dev
;
298 struct hw_perf_event
*hwc
= &event
->hw
;
300 pci_write_config_dword(pdev
, hwc
->config_base
, hwc
->config
);
303 static u64
snbep_uncore_pci_read_counter(struct intel_uncore_box
*box
, struct perf_event
*event
)
305 struct pci_dev
*pdev
= box
->pci_dev
;
306 struct hw_perf_event
*hwc
= &event
->hw
;
309 pci_read_config_dword(pdev
, hwc
->event_base
, (u32
*)&count
);
310 pci_read_config_dword(pdev
, hwc
->event_base
+ 4, (u32
*)&count
+ 1);
315 static void snbep_uncore_pci_init_box(struct intel_uncore_box
*box
)
317 struct pci_dev
*pdev
= box
->pci_dev
;
319 pci_write_config_dword(pdev
, SNBEP_PCI_PMON_BOX_CTL
, SNBEP_PMON_BOX_CTL_INT
);
322 static void snbep_uncore_msr_disable_box(struct intel_uncore_box
*box
)
327 msr
= uncore_msr_box_ctl(box
);
330 config
|= SNBEP_PMON_BOX_CTL_FRZ
;
335 static void snbep_uncore_msr_enable_box(struct intel_uncore_box
*box
)
340 msr
= uncore_msr_box_ctl(box
);
343 config
&= ~SNBEP_PMON_BOX_CTL_FRZ
;
348 static void snbep_uncore_msr_enable_event(struct intel_uncore_box
*box
, struct perf_event
*event
)
350 struct hw_perf_event
*hwc
= &event
->hw
;
351 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
353 if (reg1
->idx
!= EXTRA_REG_NONE
)
354 wrmsrl(reg1
->reg
, uncore_shared_reg_config(box
, 0));
356 wrmsrl(hwc
->config_base
, hwc
->config
| SNBEP_PMON_CTL_EN
);
359 static void snbep_uncore_msr_disable_event(struct intel_uncore_box
*box
,
360 struct perf_event
*event
)
362 struct hw_perf_event
*hwc
= &event
->hw
;
364 wrmsrl(hwc
->config_base
, hwc
->config
);
367 static void snbep_uncore_msr_init_box(struct intel_uncore_box
*box
)
369 unsigned msr
= uncore_msr_box_ctl(box
);
372 wrmsrl(msr
, SNBEP_PMON_BOX_CTL_INT
);
375 static struct attribute
*snbep_uncore_formats_attr
[] = {
376 &format_attr_event
.attr
,
377 &format_attr_umask
.attr
,
378 &format_attr_edge
.attr
,
379 &format_attr_inv
.attr
,
380 &format_attr_thresh8
.attr
,
384 static struct attribute
*snbep_uncore_ubox_formats_attr
[] = {
385 &format_attr_event
.attr
,
386 &format_attr_umask
.attr
,
387 &format_attr_edge
.attr
,
388 &format_attr_inv
.attr
,
389 &format_attr_thresh5
.attr
,
393 static struct attribute
*snbep_uncore_cbox_formats_attr
[] = {
394 &format_attr_event
.attr
,
395 &format_attr_umask
.attr
,
396 &format_attr_edge
.attr
,
397 &format_attr_tid_en
.attr
,
398 &format_attr_inv
.attr
,
399 &format_attr_thresh8
.attr
,
400 &format_attr_filter_tid
.attr
,
401 &format_attr_filter_nid
.attr
,
402 &format_attr_filter_state
.attr
,
403 &format_attr_filter_opc
.attr
,
407 static struct attribute
*snbep_uncore_pcu_formats_attr
[] = {
408 &format_attr_event_ext
.attr
,
409 &format_attr_occ_sel
.attr
,
410 &format_attr_edge
.attr
,
411 &format_attr_inv
.attr
,
412 &format_attr_thresh5
.attr
,
413 &format_attr_occ_invert
.attr
,
414 &format_attr_occ_edge
.attr
,
415 &format_attr_filter_band0
.attr
,
416 &format_attr_filter_band1
.attr
,
417 &format_attr_filter_band2
.attr
,
418 &format_attr_filter_band3
.attr
,
422 static struct attribute
*snbep_uncore_qpi_formats_attr
[] = {
423 &format_attr_event_ext
.attr
,
424 &format_attr_umask
.attr
,
425 &format_attr_edge
.attr
,
426 &format_attr_inv
.attr
,
427 &format_attr_thresh8
.attr
,
428 &format_attr_match_rds
.attr
,
429 &format_attr_match_rnid30
.attr
,
430 &format_attr_match_rnid4
.attr
,
431 &format_attr_match_dnid
.attr
,
432 &format_attr_match_mc
.attr
,
433 &format_attr_match_opc
.attr
,
434 &format_attr_match_vnw
.attr
,
435 &format_attr_match0
.attr
,
436 &format_attr_match1
.attr
,
437 &format_attr_mask_rds
.attr
,
438 &format_attr_mask_rnid30
.attr
,
439 &format_attr_mask_rnid4
.attr
,
440 &format_attr_mask_dnid
.attr
,
441 &format_attr_mask_mc
.attr
,
442 &format_attr_mask_opc
.attr
,
443 &format_attr_mask_vnw
.attr
,
444 &format_attr_mask0
.attr
,
445 &format_attr_mask1
.attr
,
449 static struct uncore_event_desc snbep_uncore_imc_events
[] = {
450 INTEL_UNCORE_EVENT_DESC(clockticks
, "event=0xff,umask=0x00"),
451 INTEL_UNCORE_EVENT_DESC(cas_count_read
, "event=0x04,umask=0x03"),
452 INTEL_UNCORE_EVENT_DESC(cas_count_read
.scale
, "6.103515625e-5"),
453 INTEL_UNCORE_EVENT_DESC(cas_count_read
.unit
, "MiB"),
454 INTEL_UNCORE_EVENT_DESC(cas_count_write
, "event=0x04,umask=0x0c"),
455 INTEL_UNCORE_EVENT_DESC(cas_count_write
.scale
, "6.103515625e-5"),
456 INTEL_UNCORE_EVENT_DESC(cas_count_write
.unit
, "MiB"),
457 { /* end: all zeroes */ },
460 static struct uncore_event_desc snbep_uncore_qpi_events
[] = {
461 INTEL_UNCORE_EVENT_DESC(clockticks
, "event=0x14"),
462 INTEL_UNCORE_EVENT_DESC(txl_flits_active
, "event=0x00,umask=0x06"),
463 INTEL_UNCORE_EVENT_DESC(drs_data
, "event=0x102,umask=0x08"),
464 INTEL_UNCORE_EVENT_DESC(ncb_data
, "event=0x103,umask=0x04"),
465 { /* end: all zeroes */ },
468 static struct attribute_group snbep_uncore_format_group
= {
470 .attrs
= snbep_uncore_formats_attr
,
473 static struct attribute_group snbep_uncore_ubox_format_group
= {
475 .attrs
= snbep_uncore_ubox_formats_attr
,
478 static struct attribute_group snbep_uncore_cbox_format_group
= {
480 .attrs
= snbep_uncore_cbox_formats_attr
,
483 static struct attribute_group snbep_uncore_pcu_format_group
= {
485 .attrs
= snbep_uncore_pcu_formats_attr
,
488 static struct attribute_group snbep_uncore_qpi_format_group
= {
490 .attrs
= snbep_uncore_qpi_formats_attr
,
493 #define __SNBEP_UNCORE_MSR_OPS_COMMON_INIT() \
494 .disable_box = snbep_uncore_msr_disable_box, \
495 .enable_box = snbep_uncore_msr_enable_box, \
496 .disable_event = snbep_uncore_msr_disable_event, \
497 .enable_event = snbep_uncore_msr_enable_event, \
498 .read_counter = uncore_msr_read_counter
500 #define SNBEP_UNCORE_MSR_OPS_COMMON_INIT() \
501 __SNBEP_UNCORE_MSR_OPS_COMMON_INIT(), \
502 .init_box = snbep_uncore_msr_init_box \
504 static struct intel_uncore_ops snbep_uncore_msr_ops = {
505 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
508 #define SNBEP_UNCORE_PCI_OPS_COMMON_INIT() \
509 .init_box = snbep_uncore_pci_init_box, \
510 .disable_box = snbep_uncore_pci_disable_box, \
511 .enable_box = snbep_uncore_pci_enable_box, \
512 .disable_event = snbep_uncore_pci_disable_event, \
513 .read_counter = snbep_uncore_pci_read_counter
515 static struct intel_uncore_ops snbep_uncore_pci_ops
= {
516 SNBEP_UNCORE_PCI_OPS_COMMON_INIT(),
517 .enable_event
= snbep_uncore_pci_enable_event
, \
520 static struct event_constraint snbep_uncore_cbox_constraints
[] = {
521 UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
522 UNCORE_EVENT_CONSTRAINT(0x02, 0x3),
523 UNCORE_EVENT_CONSTRAINT(0x04, 0x3),
524 UNCORE_EVENT_CONSTRAINT(0x05, 0x3),
525 UNCORE_EVENT_CONSTRAINT(0x07, 0x3),
526 UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
527 UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
528 UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
529 UNCORE_EVENT_CONSTRAINT(0x13, 0x3),
530 UNCORE_EVENT_CONSTRAINT(0x1b, 0xc),
531 UNCORE_EVENT_CONSTRAINT(0x1c, 0xc),
532 UNCORE_EVENT_CONSTRAINT(0x1d, 0xc),
533 UNCORE_EVENT_CONSTRAINT(0x1e, 0xc),
534 EVENT_CONSTRAINT_OVERLAP(0x1f, 0xe, 0xff),
535 UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
536 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
537 UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
538 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
539 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
540 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
541 UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
542 UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
543 UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
544 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
545 UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
546 UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
550 static struct event_constraint snbep_uncore_r2pcie_constraints
[] = {
551 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
552 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
553 UNCORE_EVENT_CONSTRAINT(0x12, 0x1),
554 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
555 UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
556 UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
557 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
558 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
559 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
560 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
564 static struct event_constraint snbep_uncore_r3qpi_constraints
[] = {
565 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
566 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
567 UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
568 UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
569 UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
570 UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
571 UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
572 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
573 UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
574 UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
575 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
576 UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
577 UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
578 UNCORE_EVENT_CONSTRAINT(0x2a, 0x3),
579 UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
580 UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
581 UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
582 UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
583 UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
584 UNCORE_EVENT_CONSTRAINT(0x30, 0x3),
585 UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
586 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
587 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
588 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
589 UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
590 UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
591 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
592 UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
596 static struct intel_uncore_type snbep_uncore_ubox
= {
601 .fixed_ctr_bits
= 48,
602 .perf_ctr
= SNBEP_U_MSR_PMON_CTR0
,
603 .event_ctl
= SNBEP_U_MSR_PMON_CTL0
,
604 .event_mask
= SNBEP_U_MSR_PMON_RAW_EVENT_MASK
,
605 .fixed_ctr
= SNBEP_U_MSR_PMON_UCLK_FIXED_CTR
,
606 .fixed_ctl
= SNBEP_U_MSR_PMON_UCLK_FIXED_CTL
,
607 .ops
= &snbep_uncore_msr_ops
,
608 .format_group
= &snbep_uncore_ubox_format_group
,
611 static struct extra_reg snbep_uncore_cbox_extra_regs
[] = {
612 SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN
,
613 SNBEP_CBO_PMON_CTL_TID_EN
, 0x1),
614 SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
615 SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0x6),
616 SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
617 SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0x6),
618 SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
619 SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0x6),
620 SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x6),
621 SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x8),
622 SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x8),
623 SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0xa),
624 SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0xa),
625 SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x2),
626 SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x2),
627 SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x2),
628 SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x2),
629 SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x8),
630 SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x8),
631 SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0xa),
632 SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0xa),
633 SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x2),
634 SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x2),
635 SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x2),
636 SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x2),
640 static void snbep_cbox_put_constraint(struct intel_uncore_box
*box
, struct perf_event
*event
)
642 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
643 struct intel_uncore_extra_reg
*er
= &box
->shared_regs
[0];
646 if (uncore_box_is_fake(box
))
649 for (i
= 0; i
< 5; i
++) {
650 if (reg1
->alloc
& (0x1 << i
))
651 atomic_sub(1 << (i
* 6), &er
->ref
);
656 static struct event_constraint
*
657 __snbep_cbox_get_constraint(struct intel_uncore_box
*box
, struct perf_event
*event
,
658 u64 (*cbox_filter_mask
)(int fields
))
660 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
661 struct intel_uncore_extra_reg
*er
= &box
->shared_regs
[0];
666 if (reg1
->idx
== EXTRA_REG_NONE
)
669 raw_spin_lock_irqsave(&er
->lock
, flags
);
670 for (i
= 0; i
< 5; i
++) {
671 if (!(reg1
->idx
& (0x1 << i
)))
673 if (!uncore_box_is_fake(box
) && (reg1
->alloc
& (0x1 << i
)))
676 mask
= cbox_filter_mask(0x1 << i
);
677 if (!__BITS_VALUE(atomic_read(&er
->ref
), i
, 6) ||
678 !((reg1
->config
^ er
->config
) & mask
)) {
679 atomic_add(1 << (i
* 6), &er
->ref
);
681 er
->config
|= reg1
->config
& mask
;
687 raw_spin_unlock_irqrestore(&er
->lock
, flags
);
691 if (!uncore_box_is_fake(box
))
692 reg1
->alloc
|= alloc
;
696 for (; i
>= 0; i
--) {
697 if (alloc
& (0x1 << i
))
698 atomic_sub(1 << (i
* 6), &er
->ref
);
700 return &uncore_constraint_empty
;
703 static u64
snbep_cbox_filter_mask(int fields
)
708 mask
|= SNBEP_CB0_MSR_PMON_BOX_FILTER_TID
;
710 mask
|= SNBEP_CB0_MSR_PMON_BOX_FILTER_NID
;
712 mask
|= SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE
;
714 mask
|= SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC
;
719 static struct event_constraint
*
720 snbep_cbox_get_constraint(struct intel_uncore_box
*box
, struct perf_event
*event
)
722 return __snbep_cbox_get_constraint(box
, event
, snbep_cbox_filter_mask
);
725 static int snbep_cbox_hw_config(struct intel_uncore_box
*box
, struct perf_event
*event
)
727 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
728 struct extra_reg
*er
;
731 for (er
= snbep_uncore_cbox_extra_regs
; er
->msr
; er
++) {
732 if (er
->event
!= (event
->hw
.config
& er
->config_mask
))
738 reg1
->reg
= SNBEP_C0_MSR_PMON_BOX_FILTER
+
739 SNBEP_CBO_MSR_OFFSET
* box
->pmu
->pmu_idx
;
740 reg1
->config
= event
->attr
.config1
& snbep_cbox_filter_mask(idx
);
746 static struct intel_uncore_ops snbep_uncore_cbox_ops
= {
747 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
748 .hw_config
= snbep_cbox_hw_config
,
749 .get_constraint
= snbep_cbox_get_constraint
,
750 .put_constraint
= snbep_cbox_put_constraint
,
753 static struct intel_uncore_type snbep_uncore_cbox
= {
758 .event_ctl
= SNBEP_C0_MSR_PMON_CTL0
,
759 .perf_ctr
= SNBEP_C0_MSR_PMON_CTR0
,
760 .event_mask
= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK
,
761 .box_ctl
= SNBEP_C0_MSR_PMON_BOX_CTL
,
762 .msr_offset
= SNBEP_CBO_MSR_OFFSET
,
763 .num_shared_regs
= 1,
764 .constraints
= snbep_uncore_cbox_constraints
,
765 .ops
= &snbep_uncore_cbox_ops
,
766 .format_group
= &snbep_uncore_cbox_format_group
,
769 static u64
snbep_pcu_alter_er(struct perf_event
*event
, int new_idx
, bool modify
)
771 struct hw_perf_event
*hwc
= &event
->hw
;
772 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
773 u64 config
= reg1
->config
;
775 if (new_idx
> reg1
->idx
)
776 config
<<= 8 * (new_idx
- reg1
->idx
);
778 config
>>= 8 * (reg1
->idx
- new_idx
);
781 hwc
->config
+= new_idx
- reg1
->idx
;
782 reg1
->config
= config
;
788 static struct event_constraint
*
789 snbep_pcu_get_constraint(struct intel_uncore_box
*box
, struct perf_event
*event
)
791 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
792 struct intel_uncore_extra_reg
*er
= &box
->shared_regs
[0];
795 u64 mask
, config1
= reg1
->config
;
798 if (reg1
->idx
== EXTRA_REG_NONE
||
799 (!uncore_box_is_fake(box
) && reg1
->alloc
))
802 mask
= 0xffULL
<< (idx
* 8);
803 raw_spin_lock_irqsave(&er
->lock
, flags
);
804 if (!__BITS_VALUE(atomic_read(&er
->ref
), idx
, 8) ||
805 !((config1
^ er
->config
) & mask
)) {
806 atomic_add(1 << (idx
* 8), &er
->ref
);
808 er
->config
|= config1
& mask
;
811 raw_spin_unlock_irqrestore(&er
->lock
, flags
);
815 if (idx
!= reg1
->idx
) {
816 config1
= snbep_pcu_alter_er(event
, idx
, false);
819 return &uncore_constraint_empty
;
822 if (!uncore_box_is_fake(box
)) {
823 if (idx
!= reg1
->idx
)
824 snbep_pcu_alter_er(event
, idx
, true);
830 static void snbep_pcu_put_constraint(struct intel_uncore_box
*box
, struct perf_event
*event
)
832 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
833 struct intel_uncore_extra_reg
*er
= &box
->shared_regs
[0];
835 if (uncore_box_is_fake(box
) || !reg1
->alloc
)
838 atomic_sub(1 << (reg1
->idx
* 8), &er
->ref
);
842 static int snbep_pcu_hw_config(struct intel_uncore_box
*box
, struct perf_event
*event
)
844 struct hw_perf_event
*hwc
= &event
->hw
;
845 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
846 int ev_sel
= hwc
->config
& SNBEP_PMON_CTL_EV_SEL_MASK
;
848 if (ev_sel
>= 0xb && ev_sel
<= 0xe) {
849 reg1
->reg
= SNBEP_PCU_MSR_PMON_BOX_FILTER
;
850 reg1
->idx
= ev_sel
- 0xb;
851 reg1
->config
= event
->attr
.config1
& (0xff << (reg1
->idx
* 8));
856 static struct intel_uncore_ops snbep_uncore_pcu_ops
= {
857 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
858 .hw_config
= snbep_pcu_hw_config
,
859 .get_constraint
= snbep_pcu_get_constraint
,
860 .put_constraint
= snbep_pcu_put_constraint
,
863 static struct intel_uncore_type snbep_uncore_pcu
= {
868 .perf_ctr
= SNBEP_PCU_MSR_PMON_CTR0
,
869 .event_ctl
= SNBEP_PCU_MSR_PMON_CTL0
,
870 .event_mask
= SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK
,
871 .box_ctl
= SNBEP_PCU_MSR_PMON_BOX_CTL
,
872 .num_shared_regs
= 1,
873 .ops
= &snbep_uncore_pcu_ops
,
874 .format_group
= &snbep_uncore_pcu_format_group
,
877 static struct intel_uncore_type
*snbep_msr_uncores
[] = {
884 void snbep_uncore_cpu_init(void)
886 if (snbep_uncore_cbox
.num_boxes
> boot_cpu_data
.x86_max_cores
)
887 snbep_uncore_cbox
.num_boxes
= boot_cpu_data
.x86_max_cores
;
888 uncore_msr_uncores
= snbep_msr_uncores
;
892 SNBEP_PCI_QPI_PORT0_FILTER
,
893 SNBEP_PCI_QPI_PORT1_FILTER
,
897 static int snbep_qpi_hw_config(struct intel_uncore_box
*box
, struct perf_event
*event
)
899 struct hw_perf_event
*hwc
= &event
->hw
;
900 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
901 struct hw_perf_event_extra
*reg2
= &hwc
->branch_reg
;
903 if ((hwc
->config
& SNBEP_PMON_CTL_EV_SEL_MASK
) == 0x38) {
905 reg1
->reg
= SNBEP_Q_Py_PCI_PMON_PKT_MATCH0
;
906 reg1
->config
= event
->attr
.config1
;
907 reg2
->reg
= SNBEP_Q_Py_PCI_PMON_PKT_MASK0
;
908 reg2
->config
= event
->attr
.config2
;
913 static void snbep_qpi_enable_event(struct intel_uncore_box
*box
, struct perf_event
*event
)
915 struct pci_dev
*pdev
= box
->pci_dev
;
916 struct hw_perf_event
*hwc
= &event
->hw
;
917 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
918 struct hw_perf_event_extra
*reg2
= &hwc
->branch_reg
;
920 if (reg1
->idx
!= EXTRA_REG_NONE
) {
921 int idx
= box
->pmu
->pmu_idx
+ SNBEP_PCI_QPI_PORT0_FILTER
;
922 struct pci_dev
*filter_pdev
= uncore_extra_pci_dev
[box
->phys_id
][idx
];
924 pci_write_config_dword(filter_pdev
, reg1
->reg
,
926 pci_write_config_dword(filter_pdev
, reg1
->reg
+ 4,
927 (u32
)(reg1
->config
>> 32));
928 pci_write_config_dword(filter_pdev
, reg2
->reg
,
930 pci_write_config_dword(filter_pdev
, reg2
->reg
+ 4,
931 (u32
)(reg2
->config
>> 32));
935 pci_write_config_dword(pdev
, hwc
->config_base
, hwc
->config
| SNBEP_PMON_CTL_EN
);
938 static struct intel_uncore_ops snbep_uncore_qpi_ops
= {
939 SNBEP_UNCORE_PCI_OPS_COMMON_INIT(),
940 .enable_event
= snbep_qpi_enable_event
,
941 .hw_config
= snbep_qpi_hw_config
,
942 .get_constraint
= uncore_get_constraint
,
943 .put_constraint
= uncore_put_constraint
,
946 #define SNBEP_UNCORE_PCI_COMMON_INIT() \
947 .perf_ctr = SNBEP_PCI_PMON_CTR0, \
948 .event_ctl = SNBEP_PCI_PMON_CTL0, \
949 .event_mask = SNBEP_PMON_RAW_EVENT_MASK, \
950 .box_ctl = SNBEP_PCI_PMON_BOX_CTL, \
951 .ops = &snbep_uncore_pci_ops, \
952 .format_group = &snbep_uncore_format_group
954 static struct intel_uncore_type snbep_uncore_ha
= {
959 SNBEP_UNCORE_PCI_COMMON_INIT(),
962 static struct intel_uncore_type snbep_uncore_imc
= {
967 .fixed_ctr_bits
= 48,
968 .fixed_ctr
= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR
,
969 .fixed_ctl
= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL
,
970 .event_descs
= snbep_uncore_imc_events
,
971 SNBEP_UNCORE_PCI_COMMON_INIT(),
974 static struct intel_uncore_type snbep_uncore_qpi
= {
979 .perf_ctr
= SNBEP_PCI_PMON_CTR0
,
980 .event_ctl
= SNBEP_PCI_PMON_CTL0
,
981 .event_mask
= SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK
,
982 .box_ctl
= SNBEP_PCI_PMON_BOX_CTL
,
983 .num_shared_regs
= 1,
984 .ops
= &snbep_uncore_qpi_ops
,
985 .event_descs
= snbep_uncore_qpi_events
,
986 .format_group
= &snbep_uncore_qpi_format_group
,
990 static struct intel_uncore_type snbep_uncore_r2pcie
= {
995 .constraints
= snbep_uncore_r2pcie_constraints
,
996 SNBEP_UNCORE_PCI_COMMON_INIT(),
999 static struct intel_uncore_type snbep_uncore_r3qpi
= {
1003 .perf_ctr_bits
= 44,
1004 .constraints
= snbep_uncore_r3qpi_constraints
,
1005 SNBEP_UNCORE_PCI_COMMON_INIT(),
1009 SNBEP_PCI_UNCORE_HA
,
1010 SNBEP_PCI_UNCORE_IMC
,
1011 SNBEP_PCI_UNCORE_QPI
,
1012 SNBEP_PCI_UNCORE_R2PCIE
,
1013 SNBEP_PCI_UNCORE_R3QPI
,
1016 static struct intel_uncore_type
*snbep_pci_uncores
[] = {
1017 [SNBEP_PCI_UNCORE_HA
] = &snbep_uncore_ha
,
1018 [SNBEP_PCI_UNCORE_IMC
] = &snbep_uncore_imc
,
1019 [SNBEP_PCI_UNCORE_QPI
] = &snbep_uncore_qpi
,
1020 [SNBEP_PCI_UNCORE_R2PCIE
] = &snbep_uncore_r2pcie
,
1021 [SNBEP_PCI_UNCORE_R3QPI
] = &snbep_uncore_r3qpi
,
1025 static const struct pci_device_id snbep_uncore_pci_ids
[] = {
1027 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_HA
),
1028 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_HA
, 0),
1030 { /* MC Channel 0 */
1031 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_IMC0
),
1032 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC
, 0),
1034 { /* MC Channel 1 */
1035 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_IMC1
),
1036 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC
, 1),
1038 { /* MC Channel 2 */
1039 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_IMC2
),
1040 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC
, 2),
1042 { /* MC Channel 3 */
1043 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_IMC3
),
1044 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC
, 3),
1047 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_QPI0
),
1048 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI
, 0),
1051 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_QPI1
),
1052 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI
, 1),
1055 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_R2PCIE
),
1056 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R2PCIE
, 0),
1058 { /* R3QPI Link 0 */
1059 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_R3QPI0
),
1060 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI
, 0),
1062 { /* R3QPI Link 1 */
1063 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, PCI_DEVICE_ID_INTEL_UNC_R3QPI1
),
1064 .driver_data
= UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI
, 1),
1066 { /* QPI Port 0 filter */
1067 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0x3c86),
1068 .driver_data
= UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV
,
1069 SNBEP_PCI_QPI_PORT0_FILTER
),
1071 { /* QPI Port 0 filter */
1072 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0x3c96),
1073 .driver_data
= UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV
,
1074 SNBEP_PCI_QPI_PORT1_FILTER
),
1076 { /* end: all zeroes */ }
1079 static struct pci_driver snbep_uncore_pci_driver
= {
1080 .name
= "snbep_uncore",
1081 .id_table
= snbep_uncore_pci_ids
,
1085 * build pci bus to socket mapping
1087 static int snbep_pci2phy_map_init(int devid
)
1089 struct pci_dev
*ubox_dev
= NULL
;
1090 int i
, bus
, nodeid
, segment
;
1091 struct pci2phy_map
*map
;
1096 /* find the UBOX device */
1097 ubox_dev
= pci_get_device(PCI_VENDOR_ID_INTEL
, devid
, ubox_dev
);
1100 bus
= ubox_dev
->bus
->number
;
1101 /* get the Node ID of the local register */
1102 err
= pci_read_config_dword(ubox_dev
, 0x40, &config
);
1106 /* get the Node ID mapping */
1107 err
= pci_read_config_dword(ubox_dev
, 0x54, &config
);
1111 segment
= pci_domain_nr(ubox_dev
->bus
);
1112 raw_spin_lock(&pci2phy_map_lock
);
1113 map
= __find_pci2phy_map(segment
);
1115 raw_spin_unlock(&pci2phy_map_lock
);
1121 * every three bits in the Node ID mapping register maps
1122 * to a particular node.
1124 for (i
= 0; i
< 8; i
++) {
1125 if (nodeid
== ((config
>> (3 * i
)) & 0x7)) {
1126 map
->pbus_to_physid
[bus
] = i
;
1130 raw_spin_unlock(&pci2phy_map_lock
);
1135 * For PCI bus with no UBOX device, find the next bus
1136 * that has UBOX device and use its mapping.
1138 raw_spin_lock(&pci2phy_map_lock
);
1139 list_for_each_entry(map
, &pci2phy_map_head
, list
) {
1141 for (bus
= 255; bus
>= 0; bus
--) {
1142 if (map
->pbus_to_physid
[bus
] >= 0)
1143 i
= map
->pbus_to_physid
[bus
];
1145 map
->pbus_to_physid
[bus
] = i
;
1148 raw_spin_unlock(&pci2phy_map_lock
);
1151 pci_dev_put(ubox_dev
);
1153 return err
? pcibios_err_to_errno(err
) : 0;
1156 int snbep_uncore_pci_init(void)
1158 int ret
= snbep_pci2phy_map_init(0x3ce0);
1161 uncore_pci_uncores
= snbep_pci_uncores
;
1162 uncore_pci_driver
= &snbep_uncore_pci_driver
;
1165 /* end of Sandy Bridge-EP uncore support */
1167 /* IvyTown uncore support */
1168 static void ivbep_uncore_msr_init_box(struct intel_uncore_box
*box
)
1170 unsigned msr
= uncore_msr_box_ctl(box
);
1172 wrmsrl(msr
, IVBEP_PMON_BOX_CTL_INT
);
1175 static void ivbep_uncore_pci_init_box(struct intel_uncore_box
*box
)
1177 struct pci_dev
*pdev
= box
->pci_dev
;
1179 pci_write_config_dword(pdev
, SNBEP_PCI_PMON_BOX_CTL
, IVBEP_PMON_BOX_CTL_INT
);
1182 #define IVBEP_UNCORE_MSR_OPS_COMMON_INIT() \
1183 .init_box = ivbep_uncore_msr_init_box, \
1184 .disable_box = snbep_uncore_msr_disable_box, \
1185 .enable_box = snbep_uncore_msr_enable_box, \
1186 .disable_event = snbep_uncore_msr_disable_event, \
1187 .enable_event = snbep_uncore_msr_enable_event, \
1188 .read_counter = uncore_msr_read_counter
1190 static struct intel_uncore_ops ivbep_uncore_msr_ops
= {
1191 IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
1194 static struct intel_uncore_ops ivbep_uncore_pci_ops
= {
1195 .init_box
= ivbep_uncore_pci_init_box
,
1196 .disable_box
= snbep_uncore_pci_disable_box
,
1197 .enable_box
= snbep_uncore_pci_enable_box
,
1198 .disable_event
= snbep_uncore_pci_disable_event
,
1199 .enable_event
= snbep_uncore_pci_enable_event
,
1200 .read_counter
= snbep_uncore_pci_read_counter
,
/* Common initializer for IVT PCI-based uncore PMON boxes. */
#define IVBEP_UNCORE_PCI_COMMON_INIT()				\
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,			\
	.event_ctl	= SNBEP_PCI_PMON_CTL0,			\
	.event_mask	= IVBEP_PMON_RAW_EVENT_MASK,		\
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,		\
	.ops		= &ivbep_uncore_pci_ops,		\
	.format_group	= &ivbep_uncore_format_group
1211 static struct attribute
*ivbep_uncore_formats_attr
[] = {
1212 &format_attr_event
.attr
,
1213 &format_attr_umask
.attr
,
1214 &format_attr_edge
.attr
,
1215 &format_attr_inv
.attr
,
1216 &format_attr_thresh8
.attr
,
1220 static struct attribute
*ivbep_uncore_ubox_formats_attr
[] = {
1221 &format_attr_event
.attr
,
1222 &format_attr_umask
.attr
,
1223 &format_attr_edge
.attr
,
1224 &format_attr_inv
.attr
,
1225 &format_attr_thresh5
.attr
,
1229 static struct attribute
*ivbep_uncore_cbox_formats_attr
[] = {
1230 &format_attr_event
.attr
,
1231 &format_attr_umask
.attr
,
1232 &format_attr_edge
.attr
,
1233 &format_attr_tid_en
.attr
,
1234 &format_attr_thresh8
.attr
,
1235 &format_attr_filter_tid
.attr
,
1236 &format_attr_filter_link
.attr
,
1237 &format_attr_filter_state2
.attr
,
1238 &format_attr_filter_nid2
.attr
,
1239 &format_attr_filter_opc2
.attr
,
1240 &format_attr_filter_nc
.attr
,
1241 &format_attr_filter_c6
.attr
,
1242 &format_attr_filter_isoc
.attr
,
1246 static struct attribute
*ivbep_uncore_pcu_formats_attr
[] = {
1247 &format_attr_event_ext
.attr
,
1248 &format_attr_occ_sel
.attr
,
1249 &format_attr_edge
.attr
,
1250 &format_attr_thresh5
.attr
,
1251 &format_attr_occ_invert
.attr
,
1252 &format_attr_occ_edge
.attr
,
1253 &format_attr_filter_band0
.attr
,
1254 &format_attr_filter_band1
.attr
,
1255 &format_attr_filter_band2
.attr
,
1256 &format_attr_filter_band3
.attr
,
1260 static struct attribute
*ivbep_uncore_qpi_formats_attr
[] = {
1261 &format_attr_event_ext
.attr
,
1262 &format_attr_umask
.attr
,
1263 &format_attr_edge
.attr
,
1264 &format_attr_thresh8
.attr
,
1265 &format_attr_match_rds
.attr
,
1266 &format_attr_match_rnid30
.attr
,
1267 &format_attr_match_rnid4
.attr
,
1268 &format_attr_match_dnid
.attr
,
1269 &format_attr_match_mc
.attr
,
1270 &format_attr_match_opc
.attr
,
1271 &format_attr_match_vnw
.attr
,
1272 &format_attr_match0
.attr
,
1273 &format_attr_match1
.attr
,
1274 &format_attr_mask_rds
.attr
,
1275 &format_attr_mask_rnid30
.attr
,
1276 &format_attr_mask_rnid4
.attr
,
1277 &format_attr_mask_dnid
.attr
,
1278 &format_attr_mask_mc
.attr
,
1279 &format_attr_mask_opc
.attr
,
1280 &format_attr_mask_vnw
.attr
,
1281 &format_attr_mask0
.attr
,
1282 &format_attr_mask1
.attr
,
1286 static struct attribute_group ivbep_uncore_format_group
= {
1288 .attrs
= ivbep_uncore_formats_attr
,
1291 static struct attribute_group ivbep_uncore_ubox_format_group
= {
1293 .attrs
= ivbep_uncore_ubox_formats_attr
,
1296 static struct attribute_group ivbep_uncore_cbox_format_group
= {
1298 .attrs
= ivbep_uncore_cbox_formats_attr
,
1301 static struct attribute_group ivbep_uncore_pcu_format_group
= {
1303 .attrs
= ivbep_uncore_pcu_formats_attr
,
1306 static struct attribute_group ivbep_uncore_qpi_format_group
= {
1308 .attrs
= ivbep_uncore_qpi_formats_attr
,
1311 static struct intel_uncore_type ivbep_uncore_ubox
= {
1315 .perf_ctr_bits
= 44,
1316 .fixed_ctr_bits
= 48,
1317 .perf_ctr
= SNBEP_U_MSR_PMON_CTR0
,
1318 .event_ctl
= SNBEP_U_MSR_PMON_CTL0
,
1319 .event_mask
= IVBEP_U_MSR_PMON_RAW_EVENT_MASK
,
1320 .fixed_ctr
= SNBEP_U_MSR_PMON_UCLK_FIXED_CTR
,
1321 .fixed_ctl
= SNBEP_U_MSR_PMON_UCLK_FIXED_CTL
,
1322 .ops
= &ivbep_uncore_msr_ops
,
1323 .format_group
= &ivbep_uncore_ubox_format_group
,
1326 static struct extra_reg ivbep_uncore_cbox_extra_regs
[] = {
1327 SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN
,
1328 SNBEP_CBO_PMON_CTL_TID_EN
, 0x1),
1329 SNBEP_CBO_EVENT_EXTRA_REG(0x1031, 0x10ff, 0x2),
1330 SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
1331 SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0xc),
1332 SNBEP_CBO_EVENT_EXTRA_REG(0x5134, 0xffff, 0xc),
1333 SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
1334 SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0xc),
1335 SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
1336 SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0xc),
1337 SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
1338 SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0xc),
1339 SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x10),
1340 SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
1341 SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
1342 SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
1343 SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
1344 SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
1345 SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
1346 SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
1347 SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
1348 SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
1349 SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
1350 SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
1351 SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
1352 SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
1353 SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
1354 SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
1355 SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
1356 SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
1357 SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
1358 SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
1359 SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
1360 SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
1361 SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
1362 SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
1363 SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
1367 static u64
ivbep_cbox_filter_mask(int fields
)
1372 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_TID
;
1374 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK
;
1376 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE
;
1378 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_NID
;
1379 if (fields
& 0x10) {
1380 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC
;
1381 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_NC
;
1382 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_C6
;
1383 mask
|= IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC
;
1389 static struct event_constraint
*
1390 ivbep_cbox_get_constraint(struct intel_uncore_box
*box
, struct perf_event
*event
)
1392 return __snbep_cbox_get_constraint(box
, event
, ivbep_cbox_filter_mask
);
1395 static int ivbep_cbox_hw_config(struct intel_uncore_box
*box
, struct perf_event
*event
)
1397 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
1398 struct extra_reg
*er
;
1401 for (er
= ivbep_uncore_cbox_extra_regs
; er
->msr
; er
++) {
1402 if (er
->event
!= (event
->hw
.config
& er
->config_mask
))
1408 reg1
->reg
= SNBEP_C0_MSR_PMON_BOX_FILTER
+
1409 SNBEP_CBO_MSR_OFFSET
* box
->pmu
->pmu_idx
;
1410 reg1
->config
= event
->attr
.config1
& ivbep_cbox_filter_mask(idx
);
1416 static void ivbep_cbox_enable_event(struct intel_uncore_box
*box
, struct perf_event
*event
)
1418 struct hw_perf_event
*hwc
= &event
->hw
;
1419 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
1421 if (reg1
->idx
!= EXTRA_REG_NONE
) {
1422 u64 filter
= uncore_shared_reg_config(box
, 0);
1423 wrmsrl(reg1
->reg
, filter
& 0xffffffff);
1424 wrmsrl(reg1
->reg
+ 6, filter
>> 32);
1427 wrmsrl(hwc
->config_base
, hwc
->config
| SNBEP_PMON_CTL_EN
);
1430 static struct intel_uncore_ops ivbep_uncore_cbox_ops
= {
1431 .init_box
= ivbep_uncore_msr_init_box
,
1432 .disable_box
= snbep_uncore_msr_disable_box
,
1433 .enable_box
= snbep_uncore_msr_enable_box
,
1434 .disable_event
= snbep_uncore_msr_disable_event
,
1435 .enable_event
= ivbep_cbox_enable_event
,
1436 .read_counter
= uncore_msr_read_counter
,
1437 .hw_config
= ivbep_cbox_hw_config
,
1438 .get_constraint
= ivbep_cbox_get_constraint
,
1439 .put_constraint
= snbep_cbox_put_constraint
,
1442 static struct intel_uncore_type ivbep_uncore_cbox
= {
1446 .perf_ctr_bits
= 44,
1447 .event_ctl
= SNBEP_C0_MSR_PMON_CTL0
,
1448 .perf_ctr
= SNBEP_C0_MSR_PMON_CTR0
,
1449 .event_mask
= IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK
,
1450 .box_ctl
= SNBEP_C0_MSR_PMON_BOX_CTL
,
1451 .msr_offset
= SNBEP_CBO_MSR_OFFSET
,
1452 .num_shared_regs
= 1,
1453 .constraints
= snbep_uncore_cbox_constraints
,
1454 .ops
= &ivbep_uncore_cbox_ops
,
1455 .format_group
= &ivbep_uncore_cbox_format_group
,
1458 static struct intel_uncore_ops ivbep_uncore_pcu_ops
= {
1459 IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
1460 .hw_config
= snbep_pcu_hw_config
,
1461 .get_constraint
= snbep_pcu_get_constraint
,
1462 .put_constraint
= snbep_pcu_put_constraint
,
1465 static struct intel_uncore_type ivbep_uncore_pcu
= {
1469 .perf_ctr_bits
= 48,
1470 .perf_ctr
= SNBEP_PCU_MSR_PMON_CTR0
,
1471 .event_ctl
= SNBEP_PCU_MSR_PMON_CTL0
,
1472 .event_mask
= IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK
,
1473 .box_ctl
= SNBEP_PCU_MSR_PMON_BOX_CTL
,
1474 .num_shared_regs
= 1,
1475 .ops
= &ivbep_uncore_pcu_ops
,
1476 .format_group
= &ivbep_uncore_pcu_format_group
,
1479 static struct intel_uncore_type
*ivbep_msr_uncores
[] = {
1486 void ivbep_uncore_cpu_init(void)
1488 if (ivbep_uncore_cbox
.num_boxes
> boot_cpu_data
.x86_max_cores
)
1489 ivbep_uncore_cbox
.num_boxes
= boot_cpu_data
.x86_max_cores
;
1490 uncore_msr_uncores
= ivbep_msr_uncores
;
1493 static struct intel_uncore_type ivbep_uncore_ha
= {
1497 .perf_ctr_bits
= 48,
1498 IVBEP_UNCORE_PCI_COMMON_INIT(),
1501 static struct intel_uncore_type ivbep_uncore_imc
= {
1505 .perf_ctr_bits
= 48,
1506 .fixed_ctr_bits
= 48,
1507 .fixed_ctr
= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR
,
1508 .fixed_ctl
= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL
,
1509 .event_descs
= snbep_uncore_imc_events
,
1510 IVBEP_UNCORE_PCI_COMMON_INIT(),
1513 /* registers in IRP boxes are not properly aligned */
1514 static unsigned ivbep_uncore_irp_ctls
[] = {0xd8, 0xdc, 0xe0, 0xe4};
1515 static unsigned ivbep_uncore_irp_ctrs
[] = {0xa0, 0xb0, 0xb8, 0xc0};
1517 static void ivbep_uncore_irp_enable_event(struct intel_uncore_box
*box
, struct perf_event
*event
)
1519 struct pci_dev
*pdev
= box
->pci_dev
;
1520 struct hw_perf_event
*hwc
= &event
->hw
;
1522 pci_write_config_dword(pdev
, ivbep_uncore_irp_ctls
[hwc
->idx
],
1523 hwc
->config
| SNBEP_PMON_CTL_EN
);
1526 static void ivbep_uncore_irp_disable_event(struct intel_uncore_box
*box
, struct perf_event
*event
)
1528 struct pci_dev
*pdev
= box
->pci_dev
;
1529 struct hw_perf_event
*hwc
= &event
->hw
;
1531 pci_write_config_dword(pdev
, ivbep_uncore_irp_ctls
[hwc
->idx
], hwc
->config
);
1534 static u64
ivbep_uncore_irp_read_counter(struct intel_uncore_box
*box
, struct perf_event
*event
)
1536 struct pci_dev
*pdev
= box
->pci_dev
;
1537 struct hw_perf_event
*hwc
= &event
->hw
;
1540 pci_read_config_dword(pdev
, ivbep_uncore_irp_ctrs
[hwc
->idx
], (u32
*)&count
);
1541 pci_read_config_dword(pdev
, ivbep_uncore_irp_ctrs
[hwc
->idx
] + 4, (u32
*)&count
+ 1);
1546 static struct intel_uncore_ops ivbep_uncore_irp_ops
= {
1547 .init_box
= ivbep_uncore_pci_init_box
,
1548 .disable_box
= snbep_uncore_pci_disable_box
,
1549 .enable_box
= snbep_uncore_pci_enable_box
,
1550 .disable_event
= ivbep_uncore_irp_disable_event
,
1551 .enable_event
= ivbep_uncore_irp_enable_event
,
1552 .read_counter
= ivbep_uncore_irp_read_counter
,
1555 static struct intel_uncore_type ivbep_uncore_irp
= {
1559 .perf_ctr_bits
= 48,
1560 .event_mask
= IVBEP_PMON_RAW_EVENT_MASK
,
1561 .box_ctl
= SNBEP_PCI_PMON_BOX_CTL
,
1562 .ops
= &ivbep_uncore_irp_ops
,
1563 .format_group
= &ivbep_uncore_format_group
,
1566 static struct intel_uncore_ops ivbep_uncore_qpi_ops
= {
1567 .init_box
= ivbep_uncore_pci_init_box
,
1568 .disable_box
= snbep_uncore_pci_disable_box
,
1569 .enable_box
= snbep_uncore_pci_enable_box
,
1570 .disable_event
= snbep_uncore_pci_disable_event
,
1571 .enable_event
= snbep_qpi_enable_event
,
1572 .read_counter
= snbep_uncore_pci_read_counter
,
1573 .hw_config
= snbep_qpi_hw_config
,
1574 .get_constraint
= uncore_get_constraint
,
1575 .put_constraint
= uncore_put_constraint
,
1578 static struct intel_uncore_type ivbep_uncore_qpi
= {
1582 .perf_ctr_bits
= 48,
1583 .perf_ctr
= SNBEP_PCI_PMON_CTR0
,
1584 .event_ctl
= SNBEP_PCI_PMON_CTL0
,
1585 .event_mask
= IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK
,
1586 .box_ctl
= SNBEP_PCI_PMON_BOX_CTL
,
1587 .num_shared_regs
= 1,
1588 .ops
= &ivbep_uncore_qpi_ops
,
1589 .format_group
= &ivbep_uncore_qpi_format_group
,
1592 static struct intel_uncore_type ivbep_uncore_r2pcie
= {
1596 .perf_ctr_bits
= 44,
1597 .constraints
= snbep_uncore_r2pcie_constraints
,
1598 IVBEP_UNCORE_PCI_COMMON_INIT(),
1601 static struct intel_uncore_type ivbep_uncore_r3qpi
= {
1605 .perf_ctr_bits
= 44,
1606 .constraints
= snbep_uncore_r3qpi_constraints
,
1607 IVBEP_UNCORE_PCI_COMMON_INIT(),
1611 IVBEP_PCI_UNCORE_HA
,
1612 IVBEP_PCI_UNCORE_IMC
,
1613 IVBEP_PCI_UNCORE_IRP
,
1614 IVBEP_PCI_UNCORE_QPI
,
1615 IVBEP_PCI_UNCORE_R2PCIE
,
1616 IVBEP_PCI_UNCORE_R3QPI
,
1619 static struct intel_uncore_type
*ivbep_pci_uncores
[] = {
1620 [IVBEP_PCI_UNCORE_HA
] = &ivbep_uncore_ha
,
1621 [IVBEP_PCI_UNCORE_IMC
] = &ivbep_uncore_imc
,
1622 [IVBEP_PCI_UNCORE_IRP
] = &ivbep_uncore_irp
,
1623 [IVBEP_PCI_UNCORE_QPI
] = &ivbep_uncore_qpi
,
1624 [IVBEP_PCI_UNCORE_R2PCIE
] = &ivbep_uncore_r2pcie
,
1625 [IVBEP_PCI_UNCORE_R3QPI
] = &ivbep_uncore_r3qpi
,
1629 static const struct pci_device_id ivbep_uncore_pci_ids
[] = {
1630 { /* Home Agent 0 */
1631 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe30),
1632 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA
, 0),
1634 { /* Home Agent 1 */
1635 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe38),
1636 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA
, 1),
1638 { /* MC0 Channel 0 */
1639 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xeb4),
1640 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 0),
1642 { /* MC0 Channel 1 */
1643 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xeb5),
1644 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 1),
1646 { /* MC0 Channel 3 */
1647 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xeb0),
1648 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 2),
1650 { /* MC0 Channel 4 */
1651 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xeb1),
1652 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 3),
1654 { /* MC1 Channel 0 */
1655 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xef4),
1656 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 4),
1658 { /* MC1 Channel 1 */
1659 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xef5),
1660 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 5),
1662 { /* MC1 Channel 3 */
1663 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xef0),
1664 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 6),
1666 { /* MC1 Channel 4 */
1667 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xef1),
1668 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC
, 7),
1671 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe39),
1672 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IRP
, 0),
1675 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe32),
1676 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI
, 0),
1679 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe33),
1680 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI
, 1),
1683 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe3a),
1684 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI
, 2),
1687 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe34),
1688 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R2PCIE
, 0),
1690 { /* R3QPI0 Link 0 */
1691 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe36),
1692 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI
, 0),
1694 { /* R3QPI0 Link 1 */
1695 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe37),
1696 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI
, 1),
1698 { /* R3QPI1 Link 2 */
1699 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe3e),
1700 .driver_data
= UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI
, 2),
1702 { /* QPI Port 0 filter */
1703 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe86),
1704 .driver_data
= UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV
,
1705 SNBEP_PCI_QPI_PORT0_FILTER
),
1707 { /* QPI Port 0 filter */
1708 PCI_DEVICE(PCI_VENDOR_ID_INTEL
, 0xe96),
1709 .driver_data
= UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV
,
1710 SNBEP_PCI_QPI_PORT1_FILTER
),
1712 { /* end: all zeroes */ }
1715 static struct pci_driver ivbep_uncore_pci_driver
= {
1716 .name
= "ivbep_uncore",
1717 .id_table
= ivbep_uncore_pci_ids
,
1720 int ivbep_uncore_pci_init(void)
1722 int ret
= snbep_pci2phy_map_init(0x0e1e);
1725 uncore_pci_uncores
= ivbep_pci_uncores
;
1726 uncore_pci_driver
= &ivbep_uncore_pci_driver
;
1729 /* end of IvyTown uncore support */
1731 /* Haswell-EP uncore support */
1732 static struct attribute
*hswep_uncore_ubox_formats_attr
[] = {
1733 &format_attr_event
.attr
,
1734 &format_attr_umask
.attr
,
1735 &format_attr_edge
.attr
,
1736 &format_attr_inv
.attr
,
1737 &format_attr_thresh5
.attr
,
1738 &format_attr_filter_tid2
.attr
,
1739 &format_attr_filter_cid
.attr
,
1743 static struct attribute_group hswep_uncore_ubox_format_group
= {
1745 .attrs
= hswep_uncore_ubox_formats_attr
,
1748 static int hswep_ubox_hw_config(struct intel_uncore_box
*box
, struct perf_event
*event
)
1750 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
1751 reg1
->reg
= HSWEP_U_MSR_PMON_FILTER
;
1752 reg1
->config
= event
->attr
.config1
& HSWEP_U_MSR_PMON_BOX_FILTER_MASK
;
1757 static struct intel_uncore_ops hswep_uncore_ubox_ops
= {
1758 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
1759 .hw_config
= hswep_ubox_hw_config
,
1760 .get_constraint
= uncore_get_constraint
,
1761 .put_constraint
= uncore_put_constraint
,
1764 static struct intel_uncore_type hswep_uncore_ubox
= {
1768 .perf_ctr_bits
= 44,
1769 .fixed_ctr_bits
= 48,
1770 .perf_ctr
= HSWEP_U_MSR_PMON_CTR0
,
1771 .event_ctl
= HSWEP_U_MSR_PMON_CTL0
,
1772 .event_mask
= SNBEP_U_MSR_PMON_RAW_EVENT_MASK
,
1773 .fixed_ctr
= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR
,
1774 .fixed_ctl
= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL
,
1775 .num_shared_regs
= 1,
1776 .ops
= &hswep_uncore_ubox_ops
,
1777 .format_group
= &hswep_uncore_ubox_format_group
,
1780 static struct attribute
*hswep_uncore_cbox_formats_attr
[] = {
1781 &format_attr_event
.attr
,
1782 &format_attr_umask
.attr
,
1783 &format_attr_edge
.attr
,
1784 &format_attr_tid_en
.attr
,
1785 &format_attr_thresh8
.attr
,
1786 &format_attr_filter_tid3
.attr
,
1787 &format_attr_filter_link2
.attr
,
1788 &format_attr_filter_state3
.attr
,
1789 &format_attr_filter_nid2
.attr
,
1790 &format_attr_filter_opc2
.attr
,
1791 &format_attr_filter_nc
.attr
,
1792 &format_attr_filter_c6
.attr
,
1793 &format_attr_filter_isoc
.attr
,
1797 static struct attribute_group hswep_uncore_cbox_format_group
= {
1799 .attrs
= hswep_uncore_cbox_formats_attr
,
1802 static struct event_constraint hswep_uncore_cbox_constraints
[] = {
1803 UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
1804 UNCORE_EVENT_CONSTRAINT(0x09, 0x1),
1805 UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
1806 UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
1807 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
1808 UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
1809 UNCORE_EVENT_CONSTRAINT(0x3e, 0x1),
1810 EVENT_CONSTRAINT_END
1813 static struct extra_reg hswep_uncore_cbox_extra_regs
[] = {
1814 SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN
,
1815 SNBEP_CBO_PMON_CTL_TID_EN
, 0x1),
1816 SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
1817 SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
1818 SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
1819 SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
1820 SNBEP_CBO_EVENT_EXTRA_REG(0x2134, 0xffff, 0x4),
1821 SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x4),
1822 SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
1823 SNBEP_CBO_EVENT_EXTRA_REG(0x4028, 0x40ff, 0x8),
1824 SNBEP_CBO_EVENT_EXTRA_REG(0x4032, 0x40ff, 0x8),
1825 SNBEP_CBO_EVENT_EXTRA_REG(0x4029, 0x40ff, 0x8),
1826 SNBEP_CBO_EVENT_EXTRA_REG(0x4033, 0x40ff, 0x8),
1827 SNBEP_CBO_EVENT_EXTRA_REG(0x402A, 0x40ff, 0x8),
1828 SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x12),
1829 SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
1830 SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
1831 SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
1832 SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
1833 SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
1834 SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
1835 SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
1836 SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
1837 SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
1838 SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
1839 SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
1840 SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
1841 SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
1842 SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
1843 SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
1844 SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
1845 SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
1846 SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
1847 SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
1848 SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
1849 SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
1850 SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
1851 SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
1855 static u64
hswep_cbox_filter_mask(int fields
)
1859 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_TID
;
1861 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK
;
1863 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE
;
1865 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_NID
;
1866 if (fields
& 0x10) {
1867 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC
;
1868 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_NC
;
1869 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_C6
;
1870 mask
|= HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC
;
1875 static struct event_constraint
*
1876 hswep_cbox_get_constraint(struct intel_uncore_box
*box
, struct perf_event
*event
)
1878 return __snbep_cbox_get_constraint(box
, event
, hswep_cbox_filter_mask
);
1881 static int hswep_cbox_hw_config(struct intel_uncore_box
*box
, struct perf_event
*event
)
1883 struct hw_perf_event_extra
*reg1
= &event
->hw
.extra_reg
;
1884 struct extra_reg
*er
;
1887 for (er
= hswep_uncore_cbox_extra_regs
; er
->msr
; er
++) {
1888 if (er
->event
!= (event
->hw
.config
& er
->config_mask
))
1894 reg1
->reg
= HSWEP_C0_MSR_PMON_BOX_FILTER0
+
1895 HSWEP_CBO_MSR_OFFSET
* box
->pmu
->pmu_idx
;
1896 reg1
->config
= event
->attr
.config1
& hswep_cbox_filter_mask(idx
);
1902 static void hswep_cbox_enable_event(struct intel_uncore_box
*box
,
1903 struct perf_event
*event
)
1905 struct hw_perf_event
*hwc
= &event
->hw
;
1906 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
1908 if (reg1
->idx
!= EXTRA_REG_NONE
) {
1909 u64 filter
= uncore_shared_reg_config(box
, 0);
1910 wrmsrl(reg1
->reg
, filter
& 0xffffffff);
1911 wrmsrl(reg1
->reg
+ 1, filter
>> 32);
1914 wrmsrl(hwc
->config_base
, hwc
->config
| SNBEP_PMON_CTL_EN
);
1917 static struct intel_uncore_ops hswep_uncore_cbox_ops
= {
1918 .init_box
= snbep_uncore_msr_init_box
,
1919 .disable_box
= snbep_uncore_msr_disable_box
,
1920 .enable_box
= snbep_uncore_msr_enable_box
,
1921 .disable_event
= snbep_uncore_msr_disable_event
,
1922 .enable_event
= hswep_cbox_enable_event
,
1923 .read_counter
= uncore_msr_read_counter
,
1924 .hw_config
= hswep_cbox_hw_config
,
1925 .get_constraint
= hswep_cbox_get_constraint
,
1926 .put_constraint
= snbep_cbox_put_constraint
,
1929 static struct intel_uncore_type hswep_uncore_cbox
= {
1933 .perf_ctr_bits
= 48,
1934 .event_ctl
= HSWEP_C0_MSR_PMON_CTL0
,
1935 .perf_ctr
= HSWEP_C0_MSR_PMON_CTR0
,
1936 .event_mask
= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK
,
1937 .box_ctl
= HSWEP_C0_MSR_PMON_BOX_CTL
,
1938 .msr_offset
= HSWEP_CBO_MSR_OFFSET
,
1939 .num_shared_regs
= 1,
1940 .constraints
= hswep_uncore_cbox_constraints
,
1941 .ops
= &hswep_uncore_cbox_ops
,
1942 .format_group
= &hswep_uncore_cbox_format_group
,
1946 * Write SBOX Initialization register bit by bit to avoid spurious #GPs
1948 static void hswep_uncore_sbox_msr_init_box(struct intel_uncore_box
*box
)
1950 unsigned msr
= uncore_msr_box_ctl(box
);
1953 u64 init
= SNBEP_PMON_BOX_CTL_INT
;
1957 for_each_set_bit(i
, (unsigned long *)&init
, 64) {
1958 flags
|= (1ULL << i
);
1964 static struct intel_uncore_ops hswep_uncore_sbox_msr_ops
= {
1965 __SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
1966 .init_box
= hswep_uncore_sbox_msr_init_box
1969 static struct attribute
*hswep_uncore_sbox_formats_attr
[] = {
1970 &format_attr_event
.attr
,
1971 &format_attr_umask
.attr
,
1972 &format_attr_edge
.attr
,
1973 &format_attr_tid_en
.attr
,
1974 &format_attr_inv
.attr
,
1975 &format_attr_thresh8
.attr
,
1979 static struct attribute_group hswep_uncore_sbox_format_group
= {
1981 .attrs
= hswep_uncore_sbox_formats_attr
,
1984 static struct intel_uncore_type hswep_uncore_sbox
= {
1988 .perf_ctr_bits
= 44,
1989 .event_ctl
= HSWEP_S0_MSR_PMON_CTL0
,
1990 .perf_ctr
= HSWEP_S0_MSR_PMON_CTR0
,
1991 .event_mask
= HSWEP_S_MSR_PMON_RAW_EVENT_MASK
,
1992 .box_ctl
= HSWEP_S0_MSR_PMON_BOX_CTL
,
1993 .msr_offset
= HSWEP_SBOX_MSR_OFFSET
,
1994 .ops
= &hswep_uncore_sbox_msr_ops
,
1995 .format_group
= &hswep_uncore_sbox_format_group
,
1998 static int hswep_pcu_hw_config(struct intel_uncore_box
*box
, struct perf_event
*event
)
2000 struct hw_perf_event
*hwc
= &event
->hw
;
2001 struct hw_perf_event_extra
*reg1
= &hwc
->extra_reg
;
2002 int ev_sel
= hwc
->config
& SNBEP_PMON_CTL_EV_SEL_MASK
;
2004 if (ev_sel
>= 0xb && ev_sel
<= 0xe) {
2005 reg1
->reg
= HSWEP_PCU_MSR_PMON_BOX_FILTER
;
2006 reg1
->idx
= ev_sel
- 0xb;
2007 reg1
->config
= event
->attr
.config1
& (0xff << reg1
->idx
);
2012 static struct intel_uncore_ops hswep_uncore_pcu_ops
= {
2013 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
2014 .hw_config
= hswep_pcu_hw_config
,
2015 .get_constraint
= snbep_pcu_get_constraint
,
2016 .put_constraint
= snbep_pcu_put_constraint
,
2019 static struct intel_uncore_type hswep_uncore_pcu
= {
2023 .perf_ctr_bits
= 48,
2024 .perf_ctr
= HSWEP_PCU_MSR_PMON_CTR0
,
2025 .event_ctl
= HSWEP_PCU_MSR_PMON_CTL0
,
2026 .event_mask
= SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK
,
2027 .box_ctl
= HSWEP_PCU_MSR_PMON_BOX_CTL
,
2028 .num_shared_regs
= 1,
2029 .ops
= &hswep_uncore_pcu_ops
,
2030 .format_group
= &snbep_uncore_pcu_format_group
,
2033 static struct intel_uncore_type
*hswep_msr_uncores
[] = {
2041 void hswep_uncore_cpu_init(void)
2043 if (hswep_uncore_cbox
.num_boxes
> boot_cpu_data
.x86_max_cores
)
2044 hswep_uncore_cbox
.num_boxes
= boot_cpu_data
.x86_max_cores
;
2046 /* Detect 6-8 core systems with only two SBOXes */
2047 if (uncore_extra_pci_dev
[0][HSWEP_PCI_PCU_3
]) {
2050 pci_read_config_dword(uncore_extra_pci_dev
[0][HSWEP_PCI_PCU_3
],
2052 if (((capid4
>> 6) & 0x3) == 0)
2053 hswep_uncore_sbox
.num_boxes
= 2;
2056 uncore_msr_uncores
= hswep_msr_uncores
;
/* HSW-EP home agent PMON (PCI-based); generic PCI register layout. */
static struct intel_uncore_type hswep_uncore_ha = {
	.name		= "ha",
	.num_counters   = 5,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
/*
 * IMC event aliases exported via sysfs.  CAS counts are scaled by
 * 6.103515625e-5 (= 64 bytes / 2^20) so perf reports them in MiB.
 */
static struct uncore_event_desc hswep_uncore_imc_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks,      "event=0x00,umask=0x00"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read,  "event=0x04,umask=0x03"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
	{ /* end: all zeroes */ },
};
/* HSW-EP memory controller channel PMON, with a fixed DCLK counter. */
static struct intel_uncore_type hswep_uncore_imc = {
	.name		= "imc",
	.num_counters   = 5,
	.num_boxes	= 8,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= hswep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
/* PCI config-space offsets of the four IRP counters, indexed by hwc->idx. */
static unsigned hswep_uncore_irp_ctrs[] = {0xa0, 0xa8, 0xb0, 0xb8};
/*
 * Read a 64-bit IRP counter as two 32-bit PCI config reads (low dword at
 * the counter offset, high dword at offset + 4).  Not atomic: the counter
 * can tick between the two reads, as with the other PCI uncore reads.
 */
static u64 hswep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;
	u64 count = 0;

	pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx], (u32 *)&count);
	pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1);

	return count;
}
/*
 * IRP box ops: standard SNB-EP PCI box control, IVB-EP style event
 * enable/disable, but the HSW-EP split 32-bit counter read above.
 */
static struct intel_uncore_ops hswep_uncore_irp_ops = {
	.init_box	= snbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= ivbep_uncore_irp_disable_event,
	.enable_event	= ivbep_uncore_irp_enable_event,
	.read_counter	= hswep_uncore_irp_read_counter,
};
/* HSW-EP IRP (IIO ring port) PMON; counters read via custom ops above. */
static struct intel_uncore_type hswep_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &hswep_uncore_irp_ops,
	.format_group		= &snbep_uncore_format_group,
};
/* HSW-EP QPI link layer PMON; reuses the SNB-EP QPI ops/formats. */
static struct intel_uncore_type hswep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 5,
	.num_boxes		= 3,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	/* shared reg: QPI match/mask filter in the companion filter device */
	.num_shared_regs	= 1,
	.ops			= &snbep_uncore_qpi_ops,
	.format_group		= &snbep_uncore_qpi_format_group,
};
/*
 * R2PCIe scheduling constraints: per event code, the bitmask of counters
 * it may occupy (0x1 = counter 0 only, 0x3 = counters 0-1).
 */
static struct event_constraint hswep_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x24, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x27, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2a, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
	EVENT_CONSTRAINT_END
};
/* HSW-EP R2PCIe (ring-to-PCIe) PMON with the constraint table above. */
static struct intel_uncore_type hswep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.constraints	= hswep_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
/*
 * R3QPI scheduling constraints: counter bitmask per event code
 * (0x1/0x3/0x7 = allowed on counters {0}, {0,1}, {0,1,2}).
 */
static struct event_constraint hswep_uncore_r3qpi_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x07, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x08, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0a, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0e, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x14, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x15, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x1f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	EVENT_CONSTRAINT_END
};
/* HSW-EP R3QPI (ring-to-QPI) PMON; note the narrower 44-bit counters. */
static struct intel_uncore_type hswep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 4,
	.num_boxes	= 3,
	.perf_ctr_bits	= 44,
	.constraints	= hswep_uncore_r3qpi_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
/* Indices into hswep_pci_uncores[] / UNCORE_PCI_DEV_DATA() for HSW-EP. */
enum {
	HSWEP_PCI_UNCORE_HA,
	HSWEP_PCI_UNCORE_IMC,
	HSWEP_PCI_UNCORE_IRP,
	HSWEP_PCI_UNCORE_QPI,
	HSWEP_PCI_UNCORE_R2PCIE,
	HSWEP_PCI_UNCORE_R3QPI,
};
/* NULL-terminated table of PCI-based HSW-EP uncore types, by enum index. */
static struct intel_uncore_type *hswep_pci_uncores[] = {
	[HSWEP_PCI_UNCORE_HA]	  = &hswep_uncore_ha,
	[HSWEP_PCI_UNCORE_IMC]	  = &hswep_uncore_imc,
	[HSWEP_PCI_UNCORE_IRP]	  = &hswep_uncore_irp,
	[HSWEP_PCI_UNCORE_QPI]	  = &hswep_uncore_qpi,
	[HSWEP_PCI_UNCORE_R2PCIE] = &hswep_uncore_r2pcie,
	[HSWEP_PCI_UNCORE_R3QPI]  = &hswep_uncore_r3qpi,
	NULL,
};
/*
 * PCI device IDs of the HSW-EP uncore PMON devices.  driver_data packs
 * (uncore type index, box index); UNCORE_EXTRA_PCI_DEV entries are not
 * PMUs themselves but companion devices (QPI filters, PCU.3 capability
 * registers) stashed in uncore_extra_pci_dev[].
 */
static const struct pci_device_id hswep_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f30),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 0),
	},
	{ /* Home Agent 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f38),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 1),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb0),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb1),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC0 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb4),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC0 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb5),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 3),
	},
	{ /* MC1 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd0),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 4),
	},
	{ /* MC1 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd1),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 5),
	},
	{ /* MC1 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd4),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 6),
	},
	{ /* MC1 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd5),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 7),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f39),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IRP, 0),
	},
	{ /* QPI0 Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f32),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI0 Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f33),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 1),
	},
	{ /* QPI1 Port 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3a),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 2),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f34),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI0 Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f36),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI0 Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f37),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* R3QPI1 Link 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3e),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 2),
	},
	{ /* QPI Port 0 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* PCU.3 (for Capability registers) */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fc0),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   HSWEP_PCI_PCU_3),
	},
	{ /* end: all zeroes */ }
};
/* PCI driver shell; probe/remove are filled in by the common uncore core. */
static struct pci_driver hswep_uncore_pci_driver = {
	.name		= "hswep_uncore",
	.id_table	= hswep_uncore_pci_ids,
};
/*
 * Register the PCI-based HSW-EP uncore PMUs.  0x2f1e is the HSW-EP ubox
 * device used to build the PCI bus -> physical package map; a non-zero
 * return from the mapping setup aborts registration.
 */
int hswep_uncore_pci_init(void)
{
	int ret = snbep_pci2phy_map_init(0x2f1e);
	if (ret)
		return ret;
	uncore_pci_uncores = hswep_pci_uncores;
	uncore_pci_driver = &hswep_uncore_pci_driver;
	return 0;
}
2339 /* end of Haswell-EP uncore support */
2341 /* BDX-DE uncore support */
/*
 * BDX-DE system configuration controller (Ubox) PMON.  Register layout is
 * shared with HSW-EP; ops and formats are the IVB-EP variants.
 */
static struct intel_uncore_type bdx_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	/* fixed counter counts uncore clockticks (UCLK) */
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.num_shared_regs	= 1,
	.ops			= &ivbep_uncore_msr_ops,
	.format_group		= &ivbep_uncore_ubox_format_group,
};
/* BDX-DE Cbox constraints: counter bitmask allowed per event code. */
static struct event_constraint bdx_uncore_cbox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	EVENT_CONSTRAINT_END
};
/*
 * BDX-DE Cbox (LLC coherence engine) PMON, one box per core; reuses the
 * HSW-EP MSR layout, ops and filter format group.
 */
static struct intel_uncore_type bdx_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 8,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_C0_MSR_PMON_CTR0,
	.event_mask		= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_CBO_MSR_OFFSET,
	/* shared reg: the per-Cbox filter register */
	.num_shared_regs	= 1,
	.constraints		= bdx_uncore_cbox_constraints,
	.ops			= &hswep_uncore_cbox_ops,
	.format_group		= &hswep_uncore_cbox_format_group,
};
/* NULL-terminated list of MSR-based BDX-DE uncore PMU types. */
static struct intel_uncore_type *bdx_msr_uncores[] = {
	&bdx_uncore_ubox,
	&bdx_uncore_cbox,
	NULL,
};
/*
 * Register the MSR-based BDX-DE uncore PMUs, clamping the Cbox count to
 * the number of cores actually present.
 */
void bdx_uncore_cpu_init(void)
{
	if (bdx_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
		bdx_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
	uncore_msr_uncores = bdx_msr_uncores;
}
/* BDX-DE home agent PMON (PCI-based); generic PCI register layout. */
static struct intel_uncore_type bdx_uncore_ha = {
	.name		= "ha",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
/*
 * BDX-DE memory controller channel PMON; shares the HSW-EP sysfs event
 * aliases (clockticks, cas_count_*) and fixed DCLK counter layout.
 */
static struct intel_uncore_type bdx_uncore_imc = {
	.name		= "imc",
	.num_counters   = 5,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= hswep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
/* BDX-DE IRP PMON; reuses the HSW-EP split 32-bit counter-read ops. */
static struct intel_uncore_type bdx_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &hswep_uncore_irp_ops,
	.format_group		= &snbep_uncore_format_group,
};
/* BDX-DE R2PCIe constraints: counter bitmask allowed per event code. */
static struct event_constraint bdx_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	EVENT_CONSTRAINT_END
};
/* BDX-DE R2PCIe (ring-to-PCIe) PMON with the constraint table above. */
static struct intel_uncore_type bdx_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.constraints	= bdx_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
/* Indices into bdx_pci_uncores[] / UNCORE_PCI_DEV_DATA() for BDX-DE. */
enum {
	BDX_PCI_UNCORE_HA,
	BDX_PCI_UNCORE_IMC,
	BDX_PCI_UNCORE_IRP,
	BDX_PCI_UNCORE_R2PCIE,
};
/* NULL-terminated table of PCI-based BDX-DE uncore types, by enum index. */
static struct intel_uncore_type *bdx_pci_uncores[] = {
	[BDX_PCI_UNCORE_HA]	= &bdx_uncore_ha,
	[BDX_PCI_UNCORE_IMC]	= &bdx_uncore_imc,
	[BDX_PCI_UNCORE_IRP]	= &bdx_uncore_irp,
	[BDX_PCI_UNCORE_R2PCIE]	= &bdx_uncore_r2pcie,
	NULL,
};
/*
 * PCI device IDs of the BDX-DE uncore PMON devices; driver_data packs
 * (uncore type index, box index).
 */
static const struct pci_device_id bdx_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f30),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_HA, 0),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb0),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb1),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 1),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f39),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IRP, 0),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f34),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* end: all zeroes */ }
};
/* PCI driver shell; probe/remove are filled in by the common uncore core. */
static struct pci_driver bdx_uncore_pci_driver = {
	.name		= "bdx_uncore",
	.id_table	= bdx_uncore_pci_ids,
};
/*
 * Register the PCI-based BDX-DE uncore PMUs.  0x6f1e is the BDX-DE ubox
 * device used to build the PCI bus -> physical package map; a non-zero
 * return from the mapping setup aborts registration.
 */
int bdx_uncore_pci_init(void)
{
	int ret = snbep_pci2phy_map_init(0x6f1e);
	if (ret)
		return ret;
	uncore_pci_uncores = bdx_pci_uncores;
	uncore_pci_driver = &bdx_uncore_pci_driver;
	return 0;
}
2503 /* end of BDX-DE uncore support */