/* (extraction artifact removed: git-blame table header) */
/* SandyBridge-EP/IvyTown uncore support */
#include "perf_event_intel_uncore.h"

/*
 * SNB-EP uncore PMON register layout: bit definitions for the per-box
 * control register, the per-counter event-select registers, and the
 * MSR/PCI addresses of the individual uncore boxes (Ubox, Cbo, PCU,
 * HA, iMC, QPI).
 */

/* SNB-EP Box level control */
#define SNBEP_PMON_BOX_CTL_RST_CTRL     (1 << 0)
#define SNBEP_PMON_BOX_CTL_RST_CTRS     (1 << 1)
#define SNBEP_PMON_BOX_CTL_FRZ          (1 << 8)
#define SNBEP_PMON_BOX_CTL_FRZ_EN       (1 << 16)
/* reset control + counters and arm the freeze enable: written at box init */
#define SNBEP_PMON_BOX_CTL_INT          (SNBEP_PMON_BOX_CTL_RST_CTRL | \
                                         SNBEP_PMON_BOX_CTL_RST_CTRS | \
                                         SNBEP_PMON_BOX_CTL_FRZ_EN)
/* SNB-EP event control */
#define SNBEP_PMON_CTL_EV_SEL_MASK      0x000000ff
#define SNBEP_PMON_CTL_UMASK_MASK       0x0000ff00
#define SNBEP_PMON_CTL_RST              (1 << 17)
#define SNBEP_PMON_CTL_EDGE_DET         (1 << 18)
#define SNBEP_PMON_CTL_EV_SEL_EXT       (1 << 21)
#define SNBEP_PMON_CTL_EN               (1 << 22)
#define SNBEP_PMON_CTL_INVERT           (1 << 23)
#define SNBEP_PMON_CTL_TRESH_MASK       0xff000000
/* every user-programmable bit of a generic event-select register */
#define SNBEP_PMON_RAW_EVENT_MASK       (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                         SNBEP_PMON_CTL_UMASK_MASK | \
                                         SNBEP_PMON_CTL_EDGE_DET | \
                                         SNBEP_PMON_CTL_INVERT | \
                                         SNBEP_PMON_CTL_TRESH_MASK)

/* SNB-EP Ubox event control: the Ubox threshold field is only 5 bits wide */
#define SNBEP_U_MSR_PMON_CTL_TRESH_MASK         0x1f000000
#define SNBEP_U_MSR_PMON_RAW_EVENT_MASK         \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PMON_CTL_UMASK_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_PMON_CTL_INVERT | \
                                 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

#define SNBEP_CBO_PMON_CTL_TID_EN               (1 << 19)
#define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK       (SNBEP_PMON_RAW_EVENT_MASK | \
                                                 SNBEP_CBO_PMON_CTL_TID_EN)

/* SNB-EP PCU event control */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK     0x0000c000
#define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK       0x1f000000
#define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT       (1 << 30)
/*
 * 1U, not 1: left-shifting a signed 1 into the sign bit is undefined
 * behaviour in C and the resulting negative int sign-extends if the mask
 * is ever widened to 64 bits.
 */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET     (1U << 31)
#define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK       \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_PMON_CTL_EV_SEL_EXT | \
                                 SNBEP_PMON_CTL_INVERT | \
                                 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

#define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK       \
                                (SNBEP_PMON_RAW_EVENT_MASK | \
                                 SNBEP_PMON_CTL_EV_SEL_EXT)

/* SNB-EP pci control register */
#define SNBEP_PCI_PMON_BOX_CTL                  0xf4
#define SNBEP_PCI_PMON_CTL0                     0xd8
/* SNB-EP pci counter register */
#define SNBEP_PCI_PMON_CTR0                     0xa0

/* SNB-EP home agent register */
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0        0x40
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1        0x44
#define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH       0x48
/* SNB-EP memory controller register */
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL         0xf0
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR         0xd0
/* SNB-EP QPI register */
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0          0x228
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1          0x22c
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK0           0x238
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK1           0x23c

/* SNB-EP Ubox register */
#define SNBEP_U_MSR_PMON_CTR0                   0xc16
#define SNBEP_U_MSR_PMON_CTL0                   0xc10

#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL         0xc08
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR         0xc09

/* SNB-EP Cbo register; per-box registers sit SNBEP_CBO_MSR_OFFSET apart */
#define SNBEP_C0_MSR_PMON_CTR0                  0xd16
#define SNBEP_C0_MSR_PMON_CTL0                  0xd10
#define SNBEP_C0_MSR_PMON_BOX_CTL               0xd04
#define SNBEP_C0_MSR_PMON_BOX_FILTER            0xd14
#define SNBEP_CBO_MSR_OFFSET                    0x20

/* fields of the Cbo filter register (tid/nid/state/opcode) */
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_TID       0x1f
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_NID       0x3fc00
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE     0x7c0000
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC       0xff800000

/* build an extra_reg entry that routes event (e) to the Cbo filter MSR */
#define SNBEP_CBO_EVENT_EXTRA_REG(e, m, i) {    \
        .event = (e),                           \
        .msr = SNBEP_C0_MSR_PMON_BOX_FILTER,    \
        .config_mask = (m),                     \
        .idx = (i)                              \
}

/* SNB-EP PCU register */
#define SNBEP_PCU_MSR_PMON_CTR0                 0xc36
#define SNBEP_PCU_MSR_PMON_CTL0                 0xc30
#define SNBEP_PCU_MSR_PMON_BOX_CTL              0xc24
#define SNBEP_PCU_MSR_PMON_BOX_FILTER           0xc34
#define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK      0xffffffff
#define SNBEP_PCU_MSR_CORE_C3_CTR               0x3fc
#define SNBEP_PCU_MSR_CORE_C6_CTR               0x3fd

/* IVBEP event control: IvyTown drops the INVERT/FRZ_EN bits from the
 * generic SNB-EP masks and adds a global freeze MSR on the Ubox. */
#define IVBEP_PMON_BOX_CTL_INT          (SNBEP_PMON_BOX_CTL_RST_CTRL | \
                                         SNBEP_PMON_BOX_CTL_RST_CTRS)
#define IVBEP_PMON_RAW_EVENT_MASK       (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                         SNBEP_PMON_CTL_UMASK_MASK | \
                                         SNBEP_PMON_CTL_EDGE_DET | \
                                         SNBEP_PMON_CTL_TRESH_MASK)
/* IVBEP Ubox */
#define IVBEP_U_MSR_PMON_GLOBAL_CTL             0xc00
/*
 * 1U, not 1: shifting a signed 1 into bit 31 is undefined behaviour and
 * would sign-extend to 0xffffffff80000000 if combined into a 64-bit MSR
 * value.
 */
#define IVBEP_U_PMON_GLOBAL_FRZ_ALL             (1U << 31)
#define IVBEP_U_PMON_GLOBAL_UNFRZ_ALL           (1 << 29)

#define IVBEP_U_MSR_PMON_RAW_EVENT_MASK \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PMON_CTL_UMASK_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)
/* IVBEP Cbo */
#define IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK       (IVBEP_PMON_RAW_EVENT_MASK | \
                                                 SNBEP_CBO_PMON_CTL_TID_EN)

/* IvyTown widens the Cbo filter register to 64 bits; fields below */
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_TID       (0x1fULL << 0)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK      (0xfULL << 5)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE     (0x3fULL << 17)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NID       (0xffffULL << 32)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC       (0x1ffULL << 52)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_C6        (0x1ULL << 61)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NC        (0x1ULL << 62)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC      (0x1ULL << 63)

/* IVBEP home agent */
#define IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST         (1 << 16)
#define IVBEP_HA_PCI_PMON_RAW_EVENT_MASK        \
                                (IVBEP_PMON_RAW_EVENT_MASK | \
                                 IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST)
/* IVBEP PCU */
#define IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK       \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PMON_CTL_EV_SEL_EXT | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
/* IVBEP QPI */
#define IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK       \
                                (IVBEP_PMON_RAW_EVENT_MASK | \
                                 SNBEP_PMON_CTL_EV_SEL_EXT)

/* extract field (i) of width (n) bits from the packed integer (x) */
#define __BITS_VALUE(x, i, n)  ((typeof(x))(((x) >> ((i) * (n))) & \
                                ((1ULL << (n)) - 1)))

/* Haswell-EP Ubox */
#define HSWEP_U_MSR_PMON_CTR0                   0x709
#define HSWEP_U_MSR_PMON_CTL0                   0x705
#define HSWEP_U_MSR_PMON_FILTER                 0x707

#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTL         0x703
#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTR         0x704

/* fields of the HSW-EP Ubox filter register */
#define HSWEP_U_MSR_PMON_BOX_FILTER_TID         (0x1 << 0)
#define HSWEP_U_MSR_PMON_BOX_FILTER_CID         (0x1fULL << 1)
#define HSWEP_U_MSR_PMON_BOX_FILTER_MASK \
                                (HSWEP_U_MSR_PMON_BOX_FILTER_TID | \
                                 HSWEP_U_MSR_PMON_BOX_FILTER_CID)

/* Haswell-EP CBo; per-box registers sit HSWEP_CBO_MSR_OFFSET apart */
#define HSWEP_C0_MSR_PMON_CTR0                  0xe08
#define HSWEP_C0_MSR_PMON_CTL0                  0xe01
#define HSWEP_C0_MSR_PMON_BOX_CTL               0xe00
#define HSWEP_C0_MSR_PMON_BOX_FILTER0           0xe05
#define HSWEP_CBO_MSR_OFFSET                    0x10


/* 64-bit CBo filter fields; TID/LINK/STATE are wider than on IvyTown */
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_TID       (0x3fULL << 0)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK      (0xfULL << 6)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE     (0x7fULL << 17)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NID       (0xffffULL << 32)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC       (0x1ffULL << 52)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_C6        (0x1ULL << 61)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NC        (0x1ULL << 62)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC      (0x1ULL << 63)


/* Haswell-EP Sbox */
#define HSWEP_S0_MSR_PMON_CTR0                  0x726
#define HSWEP_S0_MSR_PMON_CTL0                  0x721
#define HSWEP_S0_MSR_PMON_BOX_CTL               0x720
#define HSWEP_SBOX_MSR_OFFSET                   0xa
#define HSWEP_S_MSR_PMON_RAW_EVENT_MASK         (SNBEP_PMON_RAW_EVENT_MASK | \
                                                 SNBEP_CBO_PMON_CTL_TID_EN)

/* Haswell-EP PCU */
#define HSWEP_PCU_MSR_PMON_CTR0                 0x717
#define HSWEP_PCU_MSR_PMON_CTL0                 0x711
#define HSWEP_PCU_MSR_PMON_BOX_CTL              0x710
#define HSWEP_PCU_MSR_PMON_BOX_FILTER           0x715

/*
 * sysfs "format" attribute definitions.  Each entry names an event
 * modifier and the bit range of the perf_event_attr config/config1/config2
 * field it occupies.  Numeric-suffixed variants (filter_tid2, ...) are the
 * same logical field at a different bit position on a different uarch.
 */
DEFINE_UNCORE_FORMAT_ATTR(event, event, "config:0-7");
DEFINE_UNCORE_FORMAT_ATTR(event_ext, event, "config:0-7,21");
DEFINE_UNCORE_FORMAT_ATTR(umask, umask, "config:8-15");
DEFINE_UNCORE_FORMAT_ATTR(edge, edge, "config:18");
DEFINE_UNCORE_FORMAT_ATTR(tid_en, tid_en, "config:19");
DEFINE_UNCORE_FORMAT_ATTR(inv, inv, "config:23");
DEFINE_UNCORE_FORMAT_ATTR(thresh8, thresh, "config:24-31");
DEFINE_UNCORE_FORMAT_ATTR(thresh5, thresh, "config:24-28");
DEFINE_UNCORE_FORMAT_ATTR(occ_sel, occ_sel, "config:14-15");
DEFINE_UNCORE_FORMAT_ATTR(occ_invert, occ_invert, "config:30");
/* NOTE(review): "14-51" spills past bit 31 of config — matches the
 * original source, but verify against the PMON spec (expected "31"?). */
DEFINE_UNCORE_FORMAT_ATTR(occ_edge, occ_edge, "config:14-51");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid, filter_tid, "config1:0-4");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid2, filter_tid, "config1:0");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid3, filter_tid, "config1:0-5");
DEFINE_UNCORE_FORMAT_ATTR(filter_cid, filter_cid, "config1:5");
DEFINE_UNCORE_FORMAT_ATTR(filter_link, filter_link, "config1:5-8");
DEFINE_UNCORE_FORMAT_ATTR(filter_link2, filter_link, "config1:6-8");
DEFINE_UNCORE_FORMAT_ATTR(filter_nid, filter_nid, "config1:10-17");
DEFINE_UNCORE_FORMAT_ATTR(filter_nid2, filter_nid, "config1:32-47");
DEFINE_UNCORE_FORMAT_ATTR(filter_state, filter_state, "config1:18-22");
DEFINE_UNCORE_FORMAT_ATTR(filter_state2, filter_state, "config1:17-22");
DEFINE_UNCORE_FORMAT_ATTR(filter_state3, filter_state, "config1:17-23");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc, filter_opc, "config1:23-31");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc2, filter_opc, "config1:52-60");
DEFINE_UNCORE_FORMAT_ATTR(filter_nc, filter_nc, "config1:62");
DEFINE_UNCORE_FORMAT_ATTR(filter_c6, filter_c6, "config1:61");
DEFINE_UNCORE_FORMAT_ATTR(filter_isoc, filter_isoc, "config1:63");
DEFINE_UNCORE_FORMAT_ATTR(filter_band0, filter_band0, "config1:0-7");
DEFINE_UNCORE_FORMAT_ATTR(filter_band1, filter_band1, "config1:8-15");
DEFINE_UNCORE_FORMAT_ATTR(filter_band2, filter_band2, "config1:16-23");
DEFINE_UNCORE_FORMAT_ATTR(filter_band3, filter_band3, "config1:24-31");
DEFINE_UNCORE_FORMAT_ATTR(match_rds, match_rds, "config1:48-51");
DEFINE_UNCORE_FORMAT_ATTR(match_rnid30, match_rnid30, "config1:32-35");
DEFINE_UNCORE_FORMAT_ATTR(match_rnid4, match_rnid4, "config1:31");
DEFINE_UNCORE_FORMAT_ATTR(match_dnid, match_dnid, "config1:13-17");
DEFINE_UNCORE_FORMAT_ATTR(match_mc, match_mc, "config1:9-12");
DEFINE_UNCORE_FORMAT_ATTR(match_opc, match_opc, "config1:5-8");
DEFINE_UNCORE_FORMAT_ATTR(match_vnw, match_vnw, "config1:3-4");
DEFINE_UNCORE_FORMAT_ATTR(match0, match0, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(match1, match1, "config1:32-63");
DEFINE_UNCORE_FORMAT_ATTR(mask_rds, mask_rds, "config2:48-51");
DEFINE_UNCORE_FORMAT_ATTR(mask_rnid30, mask_rnid30, "config2:32-35");
DEFINE_UNCORE_FORMAT_ATTR(mask_rnid4, mask_rnid4, "config2:31");
DEFINE_UNCORE_FORMAT_ATTR(mask_dnid, mask_dnid, "config2:13-17");
DEFINE_UNCORE_FORMAT_ATTR(mask_mc, mask_mc, "config2:9-12");
DEFINE_UNCORE_FORMAT_ATTR(mask_opc, mask_opc, "config2:5-8");
DEFINE_UNCORE_FORMAT_ATTR(mask_vnw, mask_vnw, "config2:3-4");
DEFINE_UNCORE_FORMAT_ATTR(mask0, mask0, "config2:0-31");
DEFINE_UNCORE_FORMAT_ATTR(mask1, mask1, "config2:32-63");

263 | static void snbep_uncore_pci_disable_box(struct intel_uncore_box *box) | |
264 | { | |
265 | struct pci_dev *pdev = box->pci_dev; | |
266 | int box_ctl = uncore_pci_box_ctl(box); | |
267 | u32 config = 0; | |
268 | ||
269 | if (!pci_read_config_dword(pdev, box_ctl, &config)) { | |
270 | config |= SNBEP_PMON_BOX_CTL_FRZ; | |
271 | pci_write_config_dword(pdev, box_ctl, config); | |
272 | } | |
273 | } | |
274 | ||
275 | static void snbep_uncore_pci_enable_box(struct intel_uncore_box *box) | |
276 | { | |
277 | struct pci_dev *pdev = box->pci_dev; | |
278 | int box_ctl = uncore_pci_box_ctl(box); | |
279 | u32 config = 0; | |
280 | ||
281 | if (!pci_read_config_dword(pdev, box_ctl, &config)) { | |
282 | config &= ~SNBEP_PMON_BOX_CTL_FRZ; | |
283 | pci_write_config_dword(pdev, box_ctl, config); | |
284 | } | |
285 | } | |
286 | ||
/* Start one event: write its event-select config with the enable bit set. */
static void snbep_uncore_pci_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
        struct pci_dev *pdev = box->pci_dev;
        struct hw_perf_event *hwc = &event->hw;

        pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
294 | ||
/* Stop one event: rewrite its config without SNBEP_PMON_CTL_EN. */
static void snbep_uncore_pci_disable_event(struct intel_uncore_box *box, struct perf_event *event)
{
        struct pci_dev *pdev = box->pci_dev;
        struct hw_perf_event *hwc = &event->hw;

        pci_write_config_dword(pdev, hwc->config_base, hwc->config);
}
302 | ||
/*
 * Read a 64-bit counter as two 32-bit PCI config reads: low dword into the
 * first half of @count, high dword into the second.  The pointer trick
 * relies on little-endian layout, which holds on x86.
 * Note: the two reads are not atomic with respect to the running counter.
 */
static u64 snbep_uncore_pci_read_counter(struct intel_uncore_box *box, struct perf_event *event)
{
        struct pci_dev *pdev = box->pci_dev;
        struct hw_perf_event *hwc = &event->hw;
        u64 count = 0;

        pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count);
        pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1);

        return count;
}
314 | ||
/* Initialize a PCI box: reset control/counters and enable freeze support. */
static void snbep_uncore_pci_init_box(struct intel_uncore_box *box)
{
        struct pci_dev *pdev = box->pci_dev;

        pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, SNBEP_PMON_BOX_CTL_INT);
}
321 | ||
322 | static void snbep_uncore_msr_disable_box(struct intel_uncore_box *box) | |
323 | { | |
324 | u64 config; | |
325 | unsigned msr; | |
326 | ||
327 | msr = uncore_msr_box_ctl(box); | |
328 | if (msr) { | |
329 | rdmsrl(msr, config); | |
330 | config |= SNBEP_PMON_BOX_CTL_FRZ; | |
331 | wrmsrl(msr, config); | |
332 | } | |
333 | } | |
334 | ||
335 | static void snbep_uncore_msr_enable_box(struct intel_uncore_box *box) | |
336 | { | |
337 | u64 config; | |
338 | unsigned msr; | |
339 | ||
340 | msr = uncore_msr_box_ctl(box); | |
341 | if (msr) { | |
342 | rdmsrl(msr, config); | |
343 | config &= ~SNBEP_PMON_BOX_CTL_FRZ; | |
344 | wrmsrl(msr, config); | |
345 | } | |
346 | } | |
347 | ||
/*
 * Start one MSR-based event.  If the event owns an extra (filter) register,
 * program it from the box's shared-reg state first, then write the
 * event-select MSR with the enable bit set.
 */
static void snbep_uncore_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
        struct hw_perf_event *hwc = &event->hw;
        struct hw_perf_event_extra *reg1 = &hwc->extra_reg;

        if (reg1->idx != EXTRA_REG_NONE)
                wrmsrl(reg1->reg, uncore_shared_reg_config(box, 0));

        wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
358 | ||
/* Stop one MSR-based event: rewrite its config without SNBEP_PMON_CTL_EN. */
static void snbep_uncore_msr_disable_event(struct intel_uncore_box *box,
                                           struct perf_event *event)
{
        struct hw_perf_event *hwc = &event->hw;

        wrmsrl(hwc->config_base, hwc->config);
}
366 | ||
/* Initialize an MSR box: reset control/counters and enable freeze support. */
static void snbep_uncore_msr_init_box(struct intel_uncore_box *box)
{
        unsigned msr = uncore_msr_box_ctl(box);

        if (msr)
                wrmsrl(msr, SNBEP_PMON_BOX_CTL_INT);
}
374 | ||
/* sysfs format attributes for the generic SNB-EP boxes (8-bit threshold) */
static struct attribute *snbep_uncore_formats_attr[] = {
        &format_attr_event.attr,
        &format_attr_umask.attr,
        &format_attr_edge.attr,
        &format_attr_inv.attr,
        &format_attr_thresh8.attr,
        NULL,
};

/* Ubox variant: only a 5-bit threshold field */
static struct attribute *snbep_uncore_ubox_formats_attr[] = {
        &format_attr_event.attr,
        &format_attr_umask.attr,
        &format_attr_edge.attr,
        &format_attr_inv.attr,
        &format_attr_thresh5.attr,
        NULL,
};

/* Cbo variant: adds tid_en and the filter-register fields */
static struct attribute *snbep_uncore_cbox_formats_attr[] = {
        &format_attr_event.attr,
        &format_attr_umask.attr,
        &format_attr_edge.attr,
        &format_attr_tid_en.attr,
        &format_attr_inv.attr,
        &format_attr_thresh8.attr,
        &format_attr_filter_tid.attr,
        &format_attr_filter_nid.attr,
        &format_attr_filter_state.attr,
        &format_attr_filter_opc.attr,
        NULL,
};

/* PCU variant: occupancy sub-event fields and the four band filters */
static struct attribute *snbep_uncore_pcu_formats_attr[] = {
        &format_attr_event_ext.attr,
        &format_attr_occ_sel.attr,
        &format_attr_edge.attr,
        &format_attr_inv.attr,
        &format_attr_thresh5.attr,
        &format_attr_occ_invert.attr,
        &format_attr_occ_edge.attr,
        &format_attr_filter_band0.attr,
        &format_attr_filter_band1.attr,
        &format_attr_filter_band2.attr,
        &format_attr_filter_band3.attr,
        NULL,
};

/* QPI variant: packet match/mask fields in config1/config2 */
static struct attribute *snbep_uncore_qpi_formats_attr[] = {
        &format_attr_event_ext.attr,
        &format_attr_umask.attr,
        &format_attr_edge.attr,
        &format_attr_inv.attr,
        &format_attr_thresh8.attr,
        &format_attr_match_rds.attr,
        &format_attr_match_rnid30.attr,
        &format_attr_match_rnid4.attr,
        &format_attr_match_dnid.attr,
        &format_attr_match_mc.attr,
        &format_attr_match_opc.attr,
        &format_attr_match_vnw.attr,
        &format_attr_match0.attr,
        &format_attr_match1.attr,
        &format_attr_mask_rds.attr,
        &format_attr_mask_rnid30.attr,
        &format_attr_mask_rnid4.attr,
        &format_attr_mask_dnid.attr,
        &format_attr_mask_mc.attr,
        &format_attr_mask_opc.attr,
        &format_attr_mask_vnw.attr,
        &format_attr_mask0.attr,
        &format_attr_mask1.attr,
        NULL,
};
448 | ||
/*
 * Pre-defined iMC events exported through sysfs.  The .scale entries
 * convert a 64-byte cache-line count to MiB (64 / 2^20 = 6.103515625e-5).
 */
static struct uncore_event_desc snbep_uncore_imc_events[] = {
        INTEL_UNCORE_EVENT_DESC(clockticks,      "event=0xff,umask=0x00"),
        INTEL_UNCORE_EVENT_DESC(cas_count_read,  "event=0x04,umask=0x03"),
        INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
        INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
        INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
        INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
        INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
        { /* end: all zeroes */ },
};

/* Pre-defined QPI events exported through sysfs. */
static struct uncore_event_desc snbep_uncore_qpi_events[] = {
        INTEL_UNCORE_EVENT_DESC(clockticks,       "event=0x14"),
        INTEL_UNCORE_EVENT_DESC(txl_flits_active, "event=0x00,umask=0x06"),
        INTEL_UNCORE_EVENT_DESC(drs_data,         "event=0x102,umask=0x08"),
        INTEL_UNCORE_EVENT_DESC(ncb_data,         "event=0x103,umask=0x04"),
        { /* end: all zeroes */ },
};
467 | ||
/* sysfs "format" directory groups, one per attribute table above */
static struct attribute_group snbep_uncore_format_group = {
        .name = "format",
        .attrs = snbep_uncore_formats_attr,
};

static struct attribute_group snbep_uncore_ubox_format_group = {
        .name = "format",
        .attrs = snbep_uncore_ubox_formats_attr,
};

static struct attribute_group snbep_uncore_cbox_format_group = {
        .name = "format",
        .attrs = snbep_uncore_cbox_formats_attr,
};

static struct attribute_group snbep_uncore_pcu_format_group = {
        .name = "format",
        .attrs = snbep_uncore_pcu_formats_attr,
};

static struct attribute_group snbep_uncore_qpi_format_group = {
        .name = "format",
        .attrs = snbep_uncore_qpi_formats_attr,
};
492 | ||
/*
 * Common op callbacks shared by the MSR-based SNB-EP boxes.  The
 * double-underscore variant omits init_box so users can combine these
 * callbacks with a different init_box implementation.
 */
#define __SNBEP_UNCORE_MSR_OPS_COMMON_INIT()                    \
        .disable_box    = snbep_uncore_msr_disable_box,         \
        .enable_box     = snbep_uncore_msr_enable_box,          \
        .disable_event  = snbep_uncore_msr_disable_event,       \
        .enable_event   = snbep_uncore_msr_enable_event,        \
        .read_counter   = uncore_msr_read_counter

#define SNBEP_UNCORE_MSR_OPS_COMMON_INIT()                      \
        __SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),                   \
        .init_box       = snbep_uncore_msr_init_box             \

static struct intel_uncore_ops snbep_uncore_msr_ops = {
        SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
};
507 | ||
508 | #define SNBEP_UNCORE_PCI_OPS_COMMON_INIT() \ | |
509 | .init_box = snbep_uncore_pci_init_box, \ | |
510 | .disable_box = snbep_uncore_pci_disable_box, \ | |
511 | .enable_box = snbep_uncore_pci_enable_box, \ | |
512 | .disable_event = snbep_uncore_pci_disable_event, \ | |
513 | .read_counter = snbep_uncore_pci_read_counter | |
514 | ||
515 | static struct intel_uncore_ops snbep_uncore_pci_ops = { | |
516 | SNBEP_UNCORE_PCI_OPS_COMMON_INIT(), | |
517 | .enable_event = snbep_uncore_pci_enable_event, \ | |
518 | }; | |
519 | ||
/*
 * Per-box event constraints: each entry maps an event code to the bitmask
 * of counters that may host it (e.g. 0x3 = counters 0-1, 0xc = 2-3).
 */
static struct event_constraint snbep_uncore_cbox_constraints[] = {
        UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
        UNCORE_EVENT_CONSTRAINT(0x02, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x04, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x05, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x07, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
        UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x13, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x1b, 0xc),
        UNCORE_EVENT_CONSTRAINT(0x1c, 0xc),
        UNCORE_EVENT_CONSTRAINT(0x1d, 0xc),
        UNCORE_EVENT_CONSTRAINT(0x1e, 0xc),
        /* 0x1f's mask overlaps the others; needs the overlap-aware solver */
        EVENT_CONSTRAINT_OVERLAP(0x1f, 0xe, 0xff),
        UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
        UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
        EVENT_CONSTRAINT_END
};

static struct event_constraint snbep_uncore_r2pcie_constraints[] = {
        UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x12, 0x1),
        UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
        EVENT_CONSTRAINT_END
};

static struct event_constraint snbep_uncore_r3qpi_constraints[] = {
        UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
        UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x2a, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x30, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
        UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
        EVENT_CONSTRAINT_END
};
595 | ||
/* SNB-EP Ubox: one box, two 44-bit generic counters plus a 48-bit fixed one */
static struct intel_uncore_type snbep_uncore_ubox = {
        .name           = "ubox",
        .num_counters   = 2,
        .num_boxes      = 1,
        .perf_ctr_bits  = 44,
        .fixed_ctr_bits = 48,
        .perf_ctr       = SNBEP_U_MSR_PMON_CTR0,
        .event_ctl      = SNBEP_U_MSR_PMON_CTL0,
        .event_mask     = SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
        .fixed_ctr      = SNBEP_U_MSR_PMON_UCLK_FIXED_CTR,
        .fixed_ctl      = SNBEP_U_MSR_PMON_UCLK_FIXED_CTL,
        .ops            = &snbep_uncore_msr_ops,
        .format_group   = &snbep_uncore_ubox_format_group,
};
610 | ||
/*
 * Cbo events that need the filter register.  .idx is a bitmask of the
 * filter fields the event uses (bit 0 tid, 1 nid, 2 state, 3 opc — see
 * snbep_cbox_filter_mask()), matched in snbep_cbox_hw_config().
 */
static struct extra_reg snbep_uncore_cbox_extra_regs[] = {
        SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
                                  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
        SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0x6),
        SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0x6),
        SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0x6),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x6),
        SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x8),
        SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x8),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0xa),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0xa),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x2),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x2),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x2),
        SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x2),
        SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x8),
        SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x8),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0xa),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0xa),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x2),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x2),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x2),
        SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x2),
        EVENT_EXTRA_END
};
639 | ||
640 | static void snbep_cbox_put_constraint(struct intel_uncore_box *box, struct perf_event *event) | |
641 | { | |
642 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | |
643 | struct intel_uncore_extra_reg *er = &box->shared_regs[0]; | |
644 | int i; | |
645 | ||
646 | if (uncore_box_is_fake(box)) | |
647 | return; | |
648 | ||
649 | for (i = 0; i < 5; i++) { | |
650 | if (reg1->alloc & (0x1 << i)) | |
651 | atomic_sub(1 << (i * 6), &er->ref); | |
652 | } | |
653 | reg1->alloc = 0; | |
654 | } | |
655 | ||
/*
 * Try to allocate the shared Cbo filter register for this event.
 *
 * reg1->idx is a bitmask of the (up to 5) filter fields the event needs;
 * er->ref packs a 6-bit reference count per field.  A field can be shared
 * when it is currently unused or already programmed to the same value
 * (the XOR-with-mask test).  On success the field values are merged into
 * er->config and, for a real (non-fake) box, recorded in reg1->alloc so
 * snbep_cbox_put_constraint() can release them.
 *
 * Returns NULL when the event can be scheduled, or the empty constraint
 * when some field is already in use with a conflicting value (after
 * rolling back any references taken in this call).
 */
static struct event_constraint *
__snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event,
                            u64 (*cbox_filter_mask)(int fields))
{
        struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
        struct intel_uncore_extra_reg *er = &box->shared_regs[0];
        int i, alloc = 0;
        unsigned long flags;
        u64 mask;

        if (reg1->idx == EXTRA_REG_NONE)
                return NULL;

        raw_spin_lock_irqsave(&er->lock, flags);
        for (i = 0; i < 5; i++) {
                if (!(reg1->idx & (0x1 << i)))
                        continue;
                /* already allocated for this event on a previous attempt */
                if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))
                        continue;

                mask = cbox_filter_mask(0x1 << i);
                if (!__BITS_VALUE(atomic_read(&er->ref), i, 6) ||
                    !((reg1->config ^ er->config) & mask)) {
                        atomic_add(1 << (i * 6), &er->ref);
                        er->config &= ~mask;
                        er->config |= reg1->config & mask;
                        alloc |= (0x1 << i);
                } else {
                        /* field busy with a different value */
                        break;
                }
        }
        raw_spin_unlock_irqrestore(&er->lock, flags);
        if (i < 5)
                goto fail;

        if (!uncore_box_is_fake(box))
                reg1->alloc |= alloc;

        return NULL;
fail:
        /* roll back the references taken before the conflict */
        for (; i >= 0; i--) {
                if (alloc & (0x1 << i))
                        atomic_sub(1 << (i * 6), &er->ref);
        }
        return &uncore_constraint_empty;
}
702 | ||
703 | static u64 snbep_cbox_filter_mask(int fields) | |
704 | { | |
705 | u64 mask = 0; | |
706 | ||
707 | if (fields & 0x1) | |
708 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_TID; | |
709 | if (fields & 0x2) | |
710 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_NID; | |
711 | if (fields & 0x4) | |
712 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE; | |
713 | if (fields & 0x8) | |
714 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC; | |
715 | ||
716 | return mask; | |
717 | } | |
718 | ||
/* SNB-EP wrapper: allocate the filter register using the SNB-EP field map. */
static struct event_constraint *
snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
        return __snbep_cbox_get_constraint(box, event, snbep_cbox_filter_mask);
}
724 | ||
/*
 * Set up the extra (filter) register for a Cbo event: OR together the
 * field masks of every table entry matching the event encoding, then
 * record this box's filter MSR address, the requested filter value
 * (config1, clipped to the used fields) and the field mask.
 */
static int snbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
        struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
        struct extra_reg *er;
        int idx = 0;

        for (er = snbep_uncore_cbox_extra_regs; er->msr; er++) {
                if (er->event != (event->hw.config & er->config_mask))
                        continue;
                idx |= er->idx;
        }

        if (idx) {
                /* each Cbo's filter MSR is SNBEP_CBO_MSR_OFFSET above the last */
                reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER +
                        SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
                reg1->config = event->attr.config1 & snbep_cbox_filter_mask(idx);
                reg1->idx = idx;
        }
        return 0;
}
745 | ||
/* Cbo ops: common MSR callbacks plus filter-register management */
static struct intel_uncore_ops snbep_uncore_cbox_ops = {
        SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
        .hw_config              = snbep_cbox_hw_config,
        .get_constraint         = snbep_cbox_get_constraint,
        .put_constraint         = snbep_cbox_put_constraint,
};

/* SNB-EP Cbo: 8 boxes, 4 counters each, one shared filter register per box */
static struct intel_uncore_type snbep_uncore_cbox = {
        .name                   = "cbox",
        .num_counters           = 4,
        .num_boxes              = 8,
        .perf_ctr_bits          = 44,
        .event_ctl              = SNBEP_C0_MSR_PMON_CTL0,
        .perf_ctr               = SNBEP_C0_MSR_PMON_CTR0,
        .event_mask             = SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
        .box_ctl                = SNBEP_C0_MSR_PMON_BOX_CTL,
        .msr_offset             = SNBEP_CBO_MSR_OFFSET,
        .num_shared_regs        = 1,
        .constraints            = snbep_uncore_cbox_constraints,
        .ops                    = &snbep_uncore_cbox_ops,
        .format_group           = &snbep_uncore_cbox_format_group,
};
768 | ||
769 | static u64 snbep_pcu_alter_er(struct perf_event *event, int new_idx, bool modify) | |
770 | { | |
771 | struct hw_perf_event *hwc = &event->hw; | |
772 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | |
773 | u64 config = reg1->config; | |
774 | ||
775 | if (new_idx > reg1->idx) | |
776 | config <<= 8 * (new_idx - reg1->idx); | |
777 | else | |
778 | config >>= 8 * (reg1->idx - new_idx); | |
779 | ||
780 | if (modify) { | |
781 | hwc->config += new_idx - reg1->idx; | |
782 | reg1->config = config; | |
783 | reg1->idx = new_idx; | |
784 | } | |
785 | return config; | |
786 | } | |
787 | ||
788 | static struct event_constraint * | |
789 | snbep_pcu_get_constraint(struct intel_uncore_box *box, struct perf_event *event) | |
790 | { | |
791 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | |
792 | struct intel_uncore_extra_reg *er = &box->shared_regs[0]; | |
793 | unsigned long flags; | |
794 | int idx = reg1->idx; | |
795 | u64 mask, config1 = reg1->config; | |
796 | bool ok = false; | |
797 | ||
798 | if (reg1->idx == EXTRA_REG_NONE || | |
799 | (!uncore_box_is_fake(box) && reg1->alloc)) | |
800 | return NULL; | |
801 | again: | |
802 | mask = 0xffULL << (idx * 8); | |
803 | raw_spin_lock_irqsave(&er->lock, flags); | |
804 | if (!__BITS_VALUE(atomic_read(&er->ref), idx, 8) || | |
805 | !((config1 ^ er->config) & mask)) { | |
806 | atomic_add(1 << (idx * 8), &er->ref); | |
807 | er->config &= ~mask; | |
808 | er->config |= config1 & mask; | |
809 | ok = true; | |
810 | } | |
811 | raw_spin_unlock_irqrestore(&er->lock, flags); | |
812 | ||
813 | if (!ok) { | |
814 | idx = (idx + 1) % 4; | |
815 | if (idx != reg1->idx) { | |
816 | config1 = snbep_pcu_alter_er(event, idx, false); | |
817 | goto again; | |
818 | } | |
819 | return &uncore_constraint_empty; | |
820 | } | |
821 | ||
822 | if (!uncore_box_is_fake(box)) { | |
823 | if (idx != reg1->idx) | |
824 | snbep_pcu_alter_er(event, idx, true); | |
825 | reg1->alloc = 1; | |
826 | } | |
827 | return NULL; | |
828 | } | |
829 | ||
830 | static void snbep_pcu_put_constraint(struct intel_uncore_box *box, struct perf_event *event) | |
831 | { | |
832 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | |
833 | struct intel_uncore_extra_reg *er = &box->shared_regs[0]; | |
834 | ||
835 | if (uncore_box_is_fake(box) || !reg1->alloc) | |
836 | return; | |
837 | ||
838 | atomic_sub(1 << (reg1->idx * 8), &er->ref); | |
839 | reg1->alloc = 0; | |
840 | } | |
841 | ||
842 | static int snbep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event) | |
843 | { | |
844 | struct hw_perf_event *hwc = &event->hw; | |
845 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | |
846 | int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK; | |
847 | ||
848 | if (ev_sel >= 0xb && ev_sel <= 0xe) { | |
849 | reg1->reg = SNBEP_PCU_MSR_PMON_BOX_FILTER; | |
850 | reg1->idx = ev_sel - 0xb; | |
b10fc1c3 | 851 | reg1->config = event->attr.config1 & (0xff << (reg1->idx * 8)); |
8268fdfc YZ |
852 | } |
853 | return 0; | |
854 | } | |
855 | ||
856 | static struct intel_uncore_ops snbep_uncore_pcu_ops = { | |
857 | SNBEP_UNCORE_MSR_OPS_COMMON_INIT(), | |
858 | .hw_config = snbep_pcu_hw_config, | |
859 | .get_constraint = snbep_pcu_get_constraint, | |
860 | .put_constraint = snbep_pcu_put_constraint, | |
861 | }; | |
862 | ||
863 | static struct intel_uncore_type snbep_uncore_pcu = { | |
864 | .name = "pcu", | |
865 | .num_counters = 4, | |
866 | .num_boxes = 1, | |
867 | .perf_ctr_bits = 48, | |
868 | .perf_ctr = SNBEP_PCU_MSR_PMON_CTR0, | |
869 | .event_ctl = SNBEP_PCU_MSR_PMON_CTL0, | |
870 | .event_mask = SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK, | |
871 | .box_ctl = SNBEP_PCU_MSR_PMON_BOX_CTL, | |
872 | .num_shared_regs = 1, | |
873 | .ops = &snbep_uncore_pcu_ops, | |
874 | .format_group = &snbep_uncore_pcu_format_group, | |
875 | }; | |
876 | ||
877 | static struct intel_uncore_type *snbep_msr_uncores[] = { | |
878 | &snbep_uncore_ubox, | |
879 | &snbep_uncore_cbox, | |
880 | &snbep_uncore_pcu, | |
881 | NULL, | |
882 | }; | |
883 | ||
884 | void snbep_uncore_cpu_init(void) | |
885 | { | |
886 | if (snbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) | |
887 | snbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; | |
888 | uncore_msr_uncores = snbep_msr_uncores; | |
889 | } | |
890 | ||
891 | enum { | |
892 | SNBEP_PCI_QPI_PORT0_FILTER, | |
893 | SNBEP_PCI_QPI_PORT1_FILTER, | |
5306c31c | 894 | HSWEP_PCI_PCU_3, |
8268fdfc YZ |
895 | }; |
896 | ||
897 | static int snbep_qpi_hw_config(struct intel_uncore_box *box, struct perf_event *event) | |
898 | { | |
899 | struct hw_perf_event *hwc = &event->hw; | |
900 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | |
901 | struct hw_perf_event_extra *reg2 = &hwc->branch_reg; | |
902 | ||
903 | if ((hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK) == 0x38) { | |
904 | reg1->idx = 0; | |
905 | reg1->reg = SNBEP_Q_Py_PCI_PMON_PKT_MATCH0; | |
906 | reg1->config = event->attr.config1; | |
907 | reg2->reg = SNBEP_Q_Py_PCI_PMON_PKT_MASK0; | |
908 | reg2->config = event->attr.config2; | |
909 | } | |
910 | return 0; | |
911 | } | |
912 | ||
913 | static void snbep_qpi_enable_event(struct intel_uncore_box *box, struct perf_event *event) | |
914 | { | |
915 | struct pci_dev *pdev = box->pci_dev; | |
916 | struct hw_perf_event *hwc = &event->hw; | |
917 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | |
918 | struct hw_perf_event_extra *reg2 = &hwc->branch_reg; | |
919 | ||
920 | if (reg1->idx != EXTRA_REG_NONE) { | |
921 | int idx = box->pmu->pmu_idx + SNBEP_PCI_QPI_PORT0_FILTER; | |
922 | struct pci_dev *filter_pdev = uncore_extra_pci_dev[box->phys_id][idx]; | |
8268fdfc YZ |
923 | if (filter_pdev) { |
924 | pci_write_config_dword(filter_pdev, reg1->reg, | |
925 | (u32)reg1->config); | |
926 | pci_write_config_dword(filter_pdev, reg1->reg + 4, | |
927 | (u32)(reg1->config >> 32)); | |
928 | pci_write_config_dword(filter_pdev, reg2->reg, | |
929 | (u32)reg2->config); | |
930 | pci_write_config_dword(filter_pdev, reg2->reg + 4, | |
931 | (u32)(reg2->config >> 32)); | |
932 | } | |
933 | } | |
934 | ||
935 | pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); | |
936 | } | |
937 | ||
938 | static struct intel_uncore_ops snbep_uncore_qpi_ops = { | |
939 | SNBEP_UNCORE_PCI_OPS_COMMON_INIT(), | |
940 | .enable_event = snbep_qpi_enable_event, | |
941 | .hw_config = snbep_qpi_hw_config, | |
942 | .get_constraint = uncore_get_constraint, | |
943 | .put_constraint = uncore_put_constraint, | |
944 | }; | |
945 | ||
946 | #define SNBEP_UNCORE_PCI_COMMON_INIT() \ | |
947 | .perf_ctr = SNBEP_PCI_PMON_CTR0, \ | |
948 | .event_ctl = SNBEP_PCI_PMON_CTL0, \ | |
949 | .event_mask = SNBEP_PMON_RAW_EVENT_MASK, \ | |
950 | .box_ctl = SNBEP_PCI_PMON_BOX_CTL, \ | |
951 | .ops = &snbep_uncore_pci_ops, \ | |
952 | .format_group = &snbep_uncore_format_group | |
953 | ||
954 | static struct intel_uncore_type snbep_uncore_ha = { | |
955 | .name = "ha", | |
956 | .num_counters = 4, | |
957 | .num_boxes = 1, | |
958 | .perf_ctr_bits = 48, | |
959 | SNBEP_UNCORE_PCI_COMMON_INIT(), | |
960 | }; | |
961 | ||
962 | static struct intel_uncore_type snbep_uncore_imc = { | |
963 | .name = "imc", | |
964 | .num_counters = 4, | |
965 | .num_boxes = 4, | |
966 | .perf_ctr_bits = 48, | |
967 | .fixed_ctr_bits = 48, | |
968 | .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR, | |
969 | .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL, | |
970 | .event_descs = snbep_uncore_imc_events, | |
971 | SNBEP_UNCORE_PCI_COMMON_INIT(), | |
972 | }; | |
973 | ||
974 | static struct intel_uncore_type snbep_uncore_qpi = { | |
975 | .name = "qpi", | |
976 | .num_counters = 4, | |
977 | .num_boxes = 2, | |
978 | .perf_ctr_bits = 48, | |
979 | .perf_ctr = SNBEP_PCI_PMON_CTR0, | |
980 | .event_ctl = SNBEP_PCI_PMON_CTL0, | |
981 | .event_mask = SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK, | |
982 | .box_ctl = SNBEP_PCI_PMON_BOX_CTL, | |
983 | .num_shared_regs = 1, | |
984 | .ops = &snbep_uncore_qpi_ops, | |
985 | .event_descs = snbep_uncore_qpi_events, | |
986 | .format_group = &snbep_uncore_qpi_format_group, | |
987 | }; | |
988 | ||
989 | ||
990 | static struct intel_uncore_type snbep_uncore_r2pcie = { | |
991 | .name = "r2pcie", | |
992 | .num_counters = 4, | |
993 | .num_boxes = 1, | |
994 | .perf_ctr_bits = 44, | |
995 | .constraints = snbep_uncore_r2pcie_constraints, | |
996 | SNBEP_UNCORE_PCI_COMMON_INIT(), | |
997 | }; | |
998 | ||
999 | static struct intel_uncore_type snbep_uncore_r3qpi = { | |
1000 | .name = "r3qpi", | |
1001 | .num_counters = 3, | |
1002 | .num_boxes = 2, | |
1003 | .perf_ctr_bits = 44, | |
1004 | .constraints = snbep_uncore_r3qpi_constraints, | |
1005 | SNBEP_UNCORE_PCI_COMMON_INIT(), | |
1006 | }; | |
1007 | ||
1008 | enum { | |
1009 | SNBEP_PCI_UNCORE_HA, | |
1010 | SNBEP_PCI_UNCORE_IMC, | |
1011 | SNBEP_PCI_UNCORE_QPI, | |
1012 | SNBEP_PCI_UNCORE_R2PCIE, | |
1013 | SNBEP_PCI_UNCORE_R3QPI, | |
1014 | }; | |
1015 | ||
1016 | static struct intel_uncore_type *snbep_pci_uncores[] = { | |
1017 | [SNBEP_PCI_UNCORE_HA] = &snbep_uncore_ha, | |
1018 | [SNBEP_PCI_UNCORE_IMC] = &snbep_uncore_imc, | |
1019 | [SNBEP_PCI_UNCORE_QPI] = &snbep_uncore_qpi, | |
1020 | [SNBEP_PCI_UNCORE_R2PCIE] = &snbep_uncore_r2pcie, | |
1021 | [SNBEP_PCI_UNCORE_R3QPI] = &snbep_uncore_r3qpi, | |
1022 | NULL, | |
1023 | }; | |
1024 | ||
83bc90e1 | 1025 | static const struct pci_device_id snbep_uncore_pci_ids[] = { |
8268fdfc YZ |
1026 | { /* Home Agent */ |
1027 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_HA), | |
1028 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_HA, 0), | |
1029 | }, | |
1030 | { /* MC Channel 0 */ | |
1031 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC0), | |
1032 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 0), | |
1033 | }, | |
1034 | { /* MC Channel 1 */ | |
1035 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC1), | |
1036 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 1), | |
1037 | }, | |
1038 | { /* MC Channel 2 */ | |
1039 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC2), | |
1040 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 2), | |
1041 | }, | |
1042 | { /* MC Channel 3 */ | |
1043 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC3), | |
1044 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 3), | |
1045 | }, | |
1046 | { /* QPI Port 0 */ | |
1047 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI0), | |
1048 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 0), | |
1049 | }, | |
1050 | { /* QPI Port 1 */ | |
1051 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI1), | |
1052 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 1), | |
1053 | }, | |
1054 | { /* R2PCIe */ | |
1055 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R2PCIE), | |
1056 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R2PCIE, 0), | |
1057 | }, | |
1058 | { /* R3QPI Link 0 */ | |
1059 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI0), | |
1060 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 0), | |
1061 | }, | |
1062 | { /* R3QPI Link 1 */ | |
1063 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI1), | |
1064 | .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 1), | |
1065 | }, | |
1066 | { /* QPI Port 0 filter */ | |
1067 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c86), | |
1068 | .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, | |
1069 | SNBEP_PCI_QPI_PORT0_FILTER), | |
1070 | }, | |
1071 | { /* QPI Port 0 filter */ | |
1072 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c96), | |
1073 | .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, | |
1074 | SNBEP_PCI_QPI_PORT1_FILTER), | |
1075 | }, | |
1076 | { /* end: all zeroes */ } | |
1077 | }; | |
1078 | ||
1079 | static struct pci_driver snbep_uncore_pci_driver = { | |
1080 | .name = "snbep_uncore", | |
1081 | .id_table = snbep_uncore_pci_ids, | |
1082 | }; | |
1083 | ||
1084 | /* | |
1085 | * build pci bus to socket mapping | |
1086 | */ | |
1087 | static int snbep_pci2phy_map_init(int devid) | |
1088 | { | |
1089 | struct pci_dev *ubox_dev = NULL; | |
1090 | int i, bus, nodeid; | |
1091 | int err = 0; | |
1092 | u32 config = 0; | |
1093 | ||
1094 | while (1) { | |
1095 | /* find the UBOX device */ | |
1096 | ubox_dev = pci_get_device(PCI_VENDOR_ID_INTEL, devid, ubox_dev); | |
1097 | if (!ubox_dev) | |
1098 | break; | |
1099 | bus = ubox_dev->bus->number; | |
1100 | /* get the Node ID of the local register */ | |
1101 | err = pci_read_config_dword(ubox_dev, 0x40, &config); | |
1102 | if (err) | |
1103 | break; | |
1104 | nodeid = config; | |
1105 | /* get the Node ID mapping */ | |
1106 | err = pci_read_config_dword(ubox_dev, 0x54, &config); | |
1107 | if (err) | |
1108 | break; | |
1109 | /* | |
1110 | * every three bits in the Node ID mapping register maps | |
1111 | * to a particular node. | |
1112 | */ | |
1113 | for (i = 0; i < 8; i++) { | |
1114 | if (nodeid == ((config >> (3 * i)) & 0x7)) { | |
1115 | uncore_pcibus_to_physid[bus] = i; | |
1116 | break; | |
1117 | } | |
1118 | } | |
1119 | } | |
1120 | ||
1121 | if (!err) { | |
1122 | /* | |
1123 | * For PCI bus with no UBOX device, find the next bus | |
1124 | * that has UBOX device and use its mapping. | |
1125 | */ | |
1126 | i = -1; | |
1127 | for (bus = 255; bus >= 0; bus--) { | |
1128 | if (uncore_pcibus_to_physid[bus] >= 0) | |
1129 | i = uncore_pcibus_to_physid[bus]; | |
1130 | else | |
1131 | uncore_pcibus_to_physid[bus] = i; | |
1132 | } | |
1133 | } | |
1134 | ||
8e57c586 | 1135 | pci_dev_put(ubox_dev); |
8268fdfc YZ |
1136 | |
1137 | return err ? pcibios_err_to_errno(err) : 0; | |
1138 | } | |
1139 | ||
1140 | int snbep_uncore_pci_init(void) | |
1141 | { | |
1142 | int ret = snbep_pci2phy_map_init(0x3ce0); | |
1143 | if (ret) | |
1144 | return ret; | |
1145 | uncore_pci_uncores = snbep_pci_uncores; | |
1146 | uncore_pci_driver = &snbep_uncore_pci_driver; | |
1147 | return 0; | |
1148 | } | |
1149 | /* end of Sandy Bridge-EP uncore support */ | |
1150 | ||
1151 | /* IvyTown uncore support */ | |
ddcd0973 | 1152 | static void ivbep_uncore_msr_init_box(struct intel_uncore_box *box) |
8268fdfc YZ |
1153 | { |
1154 | unsigned msr = uncore_msr_box_ctl(box); | |
1155 | if (msr) | |
ddcd0973 | 1156 | wrmsrl(msr, IVBEP_PMON_BOX_CTL_INT); |
8268fdfc YZ |
1157 | } |
1158 | ||
ddcd0973 | 1159 | static void ivbep_uncore_pci_init_box(struct intel_uncore_box *box) |
8268fdfc YZ |
1160 | { |
1161 | struct pci_dev *pdev = box->pci_dev; | |
1162 | ||
ddcd0973 | 1163 | pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, IVBEP_PMON_BOX_CTL_INT); |
8268fdfc YZ |
1164 | } |
1165 | ||
ddcd0973 PZ |
1166 | #define IVBEP_UNCORE_MSR_OPS_COMMON_INIT() \ |
1167 | .init_box = ivbep_uncore_msr_init_box, \ | |
8268fdfc YZ |
1168 | .disable_box = snbep_uncore_msr_disable_box, \ |
1169 | .enable_box = snbep_uncore_msr_enable_box, \ | |
1170 | .disable_event = snbep_uncore_msr_disable_event, \ | |
1171 | .enable_event = snbep_uncore_msr_enable_event, \ | |
1172 | .read_counter = uncore_msr_read_counter | |
1173 | ||
ddcd0973 PZ |
1174 | static struct intel_uncore_ops ivbep_uncore_msr_ops = { |
1175 | IVBEP_UNCORE_MSR_OPS_COMMON_INIT(), | |
8268fdfc YZ |
1176 | }; |
1177 | ||
ddcd0973 PZ |
1178 | static struct intel_uncore_ops ivbep_uncore_pci_ops = { |
1179 | .init_box = ivbep_uncore_pci_init_box, | |
8268fdfc YZ |
1180 | .disable_box = snbep_uncore_pci_disable_box, |
1181 | .enable_box = snbep_uncore_pci_enable_box, | |
1182 | .disable_event = snbep_uncore_pci_disable_event, | |
1183 | .enable_event = snbep_uncore_pci_enable_event, | |
1184 | .read_counter = snbep_uncore_pci_read_counter, | |
1185 | }; | |
1186 | ||
ddcd0973 | 1187 | #define IVBEP_UNCORE_PCI_COMMON_INIT() \ |
8268fdfc YZ |
1188 | .perf_ctr = SNBEP_PCI_PMON_CTR0, \ |
1189 | .event_ctl = SNBEP_PCI_PMON_CTL0, \ | |
ddcd0973 | 1190 | .event_mask = IVBEP_PMON_RAW_EVENT_MASK, \ |
8268fdfc | 1191 | .box_ctl = SNBEP_PCI_PMON_BOX_CTL, \ |
ddcd0973 PZ |
1192 | .ops = &ivbep_uncore_pci_ops, \ |
1193 | .format_group = &ivbep_uncore_format_group | |
8268fdfc | 1194 | |
ddcd0973 | 1195 | static struct attribute *ivbep_uncore_formats_attr[] = { |
8268fdfc YZ |
1196 | &format_attr_event.attr, |
1197 | &format_attr_umask.attr, | |
1198 | &format_attr_edge.attr, | |
1199 | &format_attr_inv.attr, | |
1200 | &format_attr_thresh8.attr, | |
1201 | NULL, | |
1202 | }; | |
1203 | ||
ddcd0973 | 1204 | static struct attribute *ivbep_uncore_ubox_formats_attr[] = { |
8268fdfc YZ |
1205 | &format_attr_event.attr, |
1206 | &format_attr_umask.attr, | |
1207 | &format_attr_edge.attr, | |
1208 | &format_attr_inv.attr, | |
1209 | &format_attr_thresh5.attr, | |
1210 | NULL, | |
1211 | }; | |
1212 | ||
ddcd0973 | 1213 | static struct attribute *ivbep_uncore_cbox_formats_attr[] = { |
8268fdfc YZ |
1214 | &format_attr_event.attr, |
1215 | &format_attr_umask.attr, | |
1216 | &format_attr_edge.attr, | |
1217 | &format_attr_tid_en.attr, | |
1218 | &format_attr_thresh8.attr, | |
1219 | &format_attr_filter_tid.attr, | |
1220 | &format_attr_filter_link.attr, | |
1221 | &format_attr_filter_state2.attr, | |
1222 | &format_attr_filter_nid2.attr, | |
1223 | &format_attr_filter_opc2.attr, | |
7e96ae1a AK |
1224 | &format_attr_filter_nc.attr, |
1225 | &format_attr_filter_c6.attr, | |
1226 | &format_attr_filter_isoc.attr, | |
8268fdfc YZ |
1227 | NULL, |
1228 | }; | |
1229 | ||
ddcd0973 | 1230 | static struct attribute *ivbep_uncore_pcu_formats_attr[] = { |
8268fdfc YZ |
1231 | &format_attr_event_ext.attr, |
1232 | &format_attr_occ_sel.attr, | |
1233 | &format_attr_edge.attr, | |
1234 | &format_attr_thresh5.attr, | |
1235 | &format_attr_occ_invert.attr, | |
1236 | &format_attr_occ_edge.attr, | |
1237 | &format_attr_filter_band0.attr, | |
1238 | &format_attr_filter_band1.attr, | |
1239 | &format_attr_filter_band2.attr, | |
1240 | &format_attr_filter_band3.attr, | |
1241 | NULL, | |
1242 | }; | |
1243 | ||
ddcd0973 | 1244 | static struct attribute *ivbep_uncore_qpi_formats_attr[] = { |
8268fdfc YZ |
1245 | &format_attr_event_ext.attr, |
1246 | &format_attr_umask.attr, | |
1247 | &format_attr_edge.attr, | |
1248 | &format_attr_thresh8.attr, | |
1249 | &format_attr_match_rds.attr, | |
1250 | &format_attr_match_rnid30.attr, | |
1251 | &format_attr_match_rnid4.attr, | |
1252 | &format_attr_match_dnid.attr, | |
1253 | &format_attr_match_mc.attr, | |
1254 | &format_attr_match_opc.attr, | |
1255 | &format_attr_match_vnw.attr, | |
1256 | &format_attr_match0.attr, | |
1257 | &format_attr_match1.attr, | |
1258 | &format_attr_mask_rds.attr, | |
1259 | &format_attr_mask_rnid30.attr, | |
1260 | &format_attr_mask_rnid4.attr, | |
1261 | &format_attr_mask_dnid.attr, | |
1262 | &format_attr_mask_mc.attr, | |
1263 | &format_attr_mask_opc.attr, | |
1264 | &format_attr_mask_vnw.attr, | |
1265 | &format_attr_mask0.attr, | |
1266 | &format_attr_mask1.attr, | |
1267 | NULL, | |
1268 | }; | |
1269 | ||
ddcd0973 | 1270 | static struct attribute_group ivbep_uncore_format_group = { |
8268fdfc | 1271 | .name = "format", |
ddcd0973 | 1272 | .attrs = ivbep_uncore_formats_attr, |
8268fdfc YZ |
1273 | }; |
1274 | ||
ddcd0973 | 1275 | static struct attribute_group ivbep_uncore_ubox_format_group = { |
8268fdfc | 1276 | .name = "format", |
ddcd0973 | 1277 | .attrs = ivbep_uncore_ubox_formats_attr, |
8268fdfc YZ |
1278 | }; |
1279 | ||
ddcd0973 | 1280 | static struct attribute_group ivbep_uncore_cbox_format_group = { |
8268fdfc | 1281 | .name = "format", |
ddcd0973 | 1282 | .attrs = ivbep_uncore_cbox_formats_attr, |
8268fdfc YZ |
1283 | }; |
1284 | ||
ddcd0973 | 1285 | static struct attribute_group ivbep_uncore_pcu_format_group = { |
8268fdfc | 1286 | .name = "format", |
ddcd0973 | 1287 | .attrs = ivbep_uncore_pcu_formats_attr, |
8268fdfc YZ |
1288 | }; |
1289 | ||
ddcd0973 | 1290 | static struct attribute_group ivbep_uncore_qpi_format_group = { |
8268fdfc | 1291 | .name = "format", |
ddcd0973 | 1292 | .attrs = ivbep_uncore_qpi_formats_attr, |
8268fdfc YZ |
1293 | }; |
1294 | ||
ddcd0973 | 1295 | static struct intel_uncore_type ivbep_uncore_ubox = { |
8268fdfc YZ |
1296 | .name = "ubox", |
1297 | .num_counters = 2, | |
1298 | .num_boxes = 1, | |
1299 | .perf_ctr_bits = 44, | |
1300 | .fixed_ctr_bits = 48, | |
1301 | .perf_ctr = SNBEP_U_MSR_PMON_CTR0, | |
1302 | .event_ctl = SNBEP_U_MSR_PMON_CTL0, | |
ddcd0973 | 1303 | .event_mask = IVBEP_U_MSR_PMON_RAW_EVENT_MASK, |
8268fdfc YZ |
1304 | .fixed_ctr = SNBEP_U_MSR_PMON_UCLK_FIXED_CTR, |
1305 | .fixed_ctl = SNBEP_U_MSR_PMON_UCLK_FIXED_CTL, | |
ddcd0973 PZ |
1306 | .ops = &ivbep_uncore_msr_ops, |
1307 | .format_group = &ivbep_uncore_ubox_format_group, | |
8268fdfc YZ |
1308 | }; |
1309 | ||
ddcd0973 | 1310 | static struct extra_reg ivbep_uncore_cbox_extra_regs[] = { |
8268fdfc YZ |
1311 | SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN, |
1312 | SNBEP_CBO_PMON_CTL_TID_EN, 0x1), | |
1313 | SNBEP_CBO_EVENT_EXTRA_REG(0x1031, 0x10ff, 0x2), | |
1314 | SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4), | |
1315 | SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0xc), | |
1316 | SNBEP_CBO_EVENT_EXTRA_REG(0x5134, 0xffff, 0xc), | |
1317 | SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4), | |
1318 | SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0xc), | |
1319 | SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4), | |
1320 | SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0xc), | |
1321 | SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4), | |
1322 | SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0xc), | |
1323 | SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x10), | |
1324 | SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10), | |
1325 | SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10), | |
1326 | SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10), | |
1327 | SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18), | |
1328 | SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18), | |
1329 | SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8), | |
1330 | SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8), | |
1331 | SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8), | |
1332 | SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8), | |
1333 | SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10), | |
1334 | SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10), | |
1335 | SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10), | |
1336 | SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10), | |
1337 | SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10), | |
1338 | SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10), | |
1339 | SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18), | |
1340 | SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18), | |
1341 | SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8), | |
1342 | SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8), | |
1343 | SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8), | |
1344 | SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8), | |
1345 | SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10), | |
1346 | SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10), | |
1347 | SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8), | |
1348 | EVENT_EXTRA_END | |
1349 | }; | |
1350 | ||
ddcd0973 | 1351 | static u64 ivbep_cbox_filter_mask(int fields) |
8268fdfc YZ |
1352 | { |
1353 | u64 mask = 0; | |
1354 | ||
1355 | if (fields & 0x1) | |
ddcd0973 | 1356 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_TID; |
8268fdfc | 1357 | if (fields & 0x2) |
ddcd0973 | 1358 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK; |
8268fdfc | 1359 | if (fields & 0x4) |
ddcd0973 | 1360 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE; |
8268fdfc | 1361 | if (fields & 0x8) |
ddcd0973 | 1362 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NID; |
7e96ae1a | 1363 | if (fields & 0x10) { |
ddcd0973 | 1364 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC; |
7e96ae1a AK |
1365 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NC; |
1366 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_C6; | |
1367 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC; | |
1368 | } | |
8268fdfc YZ |
1369 | |
1370 | return mask; | |
1371 | } | |
1372 | ||
1373 | static struct event_constraint * | |
ddcd0973 | 1374 | ivbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event) |
8268fdfc | 1375 | { |
ddcd0973 | 1376 | return __snbep_cbox_get_constraint(box, event, ivbep_cbox_filter_mask); |
8268fdfc YZ |
1377 | } |
1378 | ||
ddcd0973 | 1379 | static int ivbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
8268fdfc YZ |
1380 | { |
1381 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | |
1382 | struct extra_reg *er; | |
1383 | int idx = 0; | |
1384 | ||
ddcd0973 | 1385 | for (er = ivbep_uncore_cbox_extra_regs; er->msr; er++) { |
8268fdfc YZ |
1386 | if (er->event != (event->hw.config & er->config_mask)) |
1387 | continue; | |
1388 | idx |= er->idx; | |
1389 | } | |
1390 | ||
1391 | if (idx) { | |
1392 | reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER + | |
1393 | SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx; | |
ddcd0973 | 1394 | reg1->config = event->attr.config1 & ivbep_cbox_filter_mask(idx); |
8268fdfc YZ |
1395 | reg1->idx = idx; |
1396 | } | |
1397 | return 0; | |
1398 | } | |
1399 | ||
ddcd0973 | 1400 | static void ivbep_cbox_enable_event(struct intel_uncore_box *box, struct perf_event *event) |
8268fdfc YZ |
1401 | { |
1402 | struct hw_perf_event *hwc = &event->hw; | |
1403 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | |
1404 | ||
1405 | if (reg1->idx != EXTRA_REG_NONE) { | |
1406 | u64 filter = uncore_shared_reg_config(box, 0); | |
1407 | wrmsrl(reg1->reg, filter & 0xffffffff); | |
1408 | wrmsrl(reg1->reg + 6, filter >> 32); | |
1409 | } | |
1410 | ||
1411 | wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); | |
1412 | } | |
1413 | ||
ddcd0973 PZ |
1414 | static struct intel_uncore_ops ivbep_uncore_cbox_ops = { |
1415 | .init_box = ivbep_uncore_msr_init_box, | |
8268fdfc YZ |
1416 | .disable_box = snbep_uncore_msr_disable_box, |
1417 | .enable_box = snbep_uncore_msr_enable_box, | |
1418 | .disable_event = snbep_uncore_msr_disable_event, | |
ddcd0973 | 1419 | .enable_event = ivbep_cbox_enable_event, |
8268fdfc | 1420 | .read_counter = uncore_msr_read_counter, |
ddcd0973 PZ |
1421 | .hw_config = ivbep_cbox_hw_config, |
1422 | .get_constraint = ivbep_cbox_get_constraint, | |
8268fdfc YZ |
1423 | .put_constraint = snbep_cbox_put_constraint, |
1424 | }; | |
1425 | ||
ddcd0973 | 1426 | static struct intel_uncore_type ivbep_uncore_cbox = { |
8268fdfc YZ |
1427 | .name = "cbox", |
1428 | .num_counters = 4, | |
1429 | .num_boxes = 15, | |
1430 | .perf_ctr_bits = 44, | |
1431 | .event_ctl = SNBEP_C0_MSR_PMON_CTL0, | |
1432 | .perf_ctr = SNBEP_C0_MSR_PMON_CTR0, | |
ddcd0973 | 1433 | .event_mask = IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK, |
8268fdfc YZ |
1434 | .box_ctl = SNBEP_C0_MSR_PMON_BOX_CTL, |
1435 | .msr_offset = SNBEP_CBO_MSR_OFFSET, | |
1436 | .num_shared_regs = 1, | |
1437 | .constraints = snbep_uncore_cbox_constraints, | |
ddcd0973 PZ |
1438 | .ops = &ivbep_uncore_cbox_ops, |
1439 | .format_group = &ivbep_uncore_cbox_format_group, | |
8268fdfc YZ |
1440 | }; |
1441 | ||
ddcd0973 PZ |
1442 | static struct intel_uncore_ops ivbep_uncore_pcu_ops = { |
1443 | IVBEP_UNCORE_MSR_OPS_COMMON_INIT(), | |
8268fdfc YZ |
1444 | .hw_config = snbep_pcu_hw_config, |
1445 | .get_constraint = snbep_pcu_get_constraint, | |
1446 | .put_constraint = snbep_pcu_put_constraint, | |
1447 | }; | |
1448 | ||
ddcd0973 | 1449 | static struct intel_uncore_type ivbep_uncore_pcu = { |
8268fdfc YZ |
1450 | .name = "pcu", |
1451 | .num_counters = 4, | |
1452 | .num_boxes = 1, | |
1453 | .perf_ctr_bits = 48, | |
1454 | .perf_ctr = SNBEP_PCU_MSR_PMON_CTR0, | |
1455 | .event_ctl = SNBEP_PCU_MSR_PMON_CTL0, | |
ddcd0973 | 1456 | .event_mask = IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK, |
8268fdfc YZ |
1457 | .box_ctl = SNBEP_PCU_MSR_PMON_BOX_CTL, |
1458 | .num_shared_regs = 1, | |
ddcd0973 PZ |
1459 | .ops = &ivbep_uncore_pcu_ops, |
1460 | .format_group = &ivbep_uncore_pcu_format_group, | |
8268fdfc YZ |
1461 | }; |
1462 | ||
ddcd0973 PZ |
1463 | static struct intel_uncore_type *ivbep_msr_uncores[] = { |
1464 | &ivbep_uncore_ubox, | |
1465 | &ivbep_uncore_cbox, | |
1466 | &ivbep_uncore_pcu, | |
8268fdfc YZ |
1467 | NULL, |
1468 | }; | |
1469 | ||
ddcd0973 | 1470 | void ivbep_uncore_cpu_init(void) |
8268fdfc | 1471 | { |
ddcd0973 PZ |
1472 | if (ivbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) |
1473 | ivbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; | |
1474 | uncore_msr_uncores = ivbep_msr_uncores; | |
8268fdfc YZ |
1475 | } |
1476 | ||
ddcd0973 | 1477 | static struct intel_uncore_type ivbep_uncore_ha = { |
8268fdfc YZ |
1478 | .name = "ha", |
1479 | .num_counters = 4, | |
1480 | .num_boxes = 2, | |
1481 | .perf_ctr_bits = 48, | |
ddcd0973 | 1482 | IVBEP_UNCORE_PCI_COMMON_INIT(), |
8268fdfc YZ |
1483 | }; |
1484 | ||
ddcd0973 | 1485 | static struct intel_uncore_type ivbep_uncore_imc = { |
8268fdfc YZ |
1486 | .name = "imc", |
1487 | .num_counters = 4, | |
1488 | .num_boxes = 8, | |
1489 | .perf_ctr_bits = 48, | |
1490 | .fixed_ctr_bits = 48, | |
1491 | .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR, | |
1492 | .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL, | |
85a16ef6 | 1493 | .event_descs = snbep_uncore_imc_events, |
ddcd0973 | 1494 | IVBEP_UNCORE_PCI_COMMON_INIT(), |
8268fdfc YZ |
1495 | }; |
1496 | ||
1497 | /* registers in IRP boxes are not properly aligned */ | |
ddcd0973 PZ |
1498 | static unsigned ivbep_uncore_irp_ctls[] = {0xd8, 0xdc, 0xe0, 0xe4}; |
1499 | static unsigned ivbep_uncore_irp_ctrs[] = {0xa0, 0xb0, 0xb8, 0xc0}; | |
8268fdfc | 1500 | |
ddcd0973 | 1501 | static void ivbep_uncore_irp_enable_event(struct intel_uncore_box *box, struct perf_event *event) |
8268fdfc YZ |
1502 | { |
1503 | struct pci_dev *pdev = box->pci_dev; | |
1504 | struct hw_perf_event *hwc = &event->hw; | |
1505 | ||
ddcd0973 | 1506 | pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], |
8268fdfc YZ |
1507 | hwc->config | SNBEP_PMON_CTL_EN); |
1508 | } | |
1509 | ||
ddcd0973 | 1510 | static void ivbep_uncore_irp_disable_event(struct intel_uncore_box *box, struct perf_event *event) |
8268fdfc YZ |
1511 | { |
1512 | struct pci_dev *pdev = box->pci_dev; | |
1513 | struct hw_perf_event *hwc = &event->hw; | |
1514 | ||
ddcd0973 | 1515 | pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], hwc->config); |
8268fdfc YZ |
1516 | } |
1517 | ||
ddcd0973 | 1518 | static u64 ivbep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event) |
8268fdfc YZ |
1519 | { |
1520 | struct pci_dev *pdev = box->pci_dev; | |
1521 | struct hw_perf_event *hwc = &event->hw; | |
1522 | u64 count = 0; | |
1523 | ||
ddcd0973 PZ |
1524 | pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx], (u32 *)&count); |
1525 | pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1); | |
8268fdfc YZ |
1526 | |
1527 | return count; | |
1528 | } | |
1529 | ||
ddcd0973 PZ |
1530 | static struct intel_uncore_ops ivbep_uncore_irp_ops = { |
1531 | .init_box = ivbep_uncore_pci_init_box, | |
8268fdfc YZ |
1532 | .disable_box = snbep_uncore_pci_disable_box, |
1533 | .enable_box = snbep_uncore_pci_enable_box, | |
ddcd0973 PZ |
1534 | .disable_event = ivbep_uncore_irp_disable_event, |
1535 | .enable_event = ivbep_uncore_irp_enable_event, | |
1536 | .read_counter = ivbep_uncore_irp_read_counter, | |
8268fdfc YZ |
1537 | }; |
1538 | ||
/* IVT IRP (IIO ring port) PMU: 4 counters, one box; no .perf_ctr/.event_ctl
 * because the custom ops address counters directly. */
static struct intel_uncore_type ivbep_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= IVBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &ivbep_uncore_irp_ops,
	.format_group		= &ivbep_uncore_format_group,
};
1549 | ||
/* QPI reuses the SNB-EP PCI ops but needs the shared match/mask filter
 * registers, hence hw_config plus get/put_constraint. */
static struct intel_uncore_ops ivbep_uncore_qpi_ops = {
	.init_box	= ivbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= snbep_uncore_pci_disable_event,
	.enable_event	= snbep_qpi_enable_event,
	.read_counter	= snbep_uncore_pci_read_counter,
	.hw_config	= snbep_qpi_hw_config,
	.get_constraint	= uncore_get_constraint,
	.put_constraint	= uncore_put_constraint,
};
1561 | ||
/* IVT QPI link-layer PMU: 3 link boxes, one shared filter register each. */
static struct intel_uncore_type ivbep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 4,
	.num_boxes		= 3,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &ivbep_uncore_qpi_ops,
	.format_group		= &ivbep_uncore_qpi_format_group,
};
1575 | ||
/* IVT R2PCIe ring-to-PCIe agent PMU; counter usage restricted by
 * snbep_uncore_r2pcie_constraints. */
static struct intel_uncore_type ivbep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r2pcie_constraints,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1584 | ||
/* IVT R3QPI ring-to-QPI agent PMU: 2 boxes, 3 counters each. */
static struct intel_uncore_type ivbep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 3,
	.num_boxes	= 2,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r3qpi_constraints,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1593 | ||
/* Indices into ivbep_pci_uncores[]; encoded into pci_device_id driver_data. */
enum {
	IVBEP_PCI_UNCORE_HA,
	IVBEP_PCI_UNCORE_IMC,
	IVBEP_PCI_UNCORE_IRP,
	IVBEP_PCI_UNCORE_QPI,
	IVBEP_PCI_UNCORE_R2PCIE,
	IVBEP_PCI_UNCORE_R3QPI,
};
1602 | ||
/* NULL-terminated list of all IVT PCI-based uncore PMU types. */
static struct intel_uncore_type *ivbep_pci_uncores[] = {
	[IVBEP_PCI_UNCORE_HA]	= &ivbep_uncore_ha,
	[IVBEP_PCI_UNCORE_IMC]	= &ivbep_uncore_imc,
	[IVBEP_PCI_UNCORE_IRP]	= &ivbep_uncore_irp,
	[IVBEP_PCI_UNCORE_QPI]	= &ivbep_uncore_qpi,
	[IVBEP_PCI_UNCORE_R2PCIE]	= &ivbep_uncore_r2pcie,
	[IVBEP_PCI_UNCORE_R3QPI]	= &ivbep_uncore_r3qpi,
	NULL,
};
1612 | ||
/*
 * PCI device ID table for IVT uncore boxes.  driver_data encodes the
 * uncore type index plus the box (or extra-device slot) index.
 * NOTE(review): the "MC0 Channel 3/4" labels pair device IDs 0xeb0/0xeb1
 * with IMC box indices 2/3 — presumably matching physical channel naming;
 * confirm against the IVT uncore manual.
 */
static const struct pci_device_id ivbep_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe30),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 0),
	},
	{ /* Home Agent 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe38),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 1),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb4),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb5),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC0 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb0),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC0 Channel 4 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb1),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 3),
	},
	{ /* MC1 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef4),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 4),
	},
	{ /* MC1 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef5),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 5),
	},
	{ /* MC1 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef0),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 6),
	},
	{ /* MC1 Channel 4 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef1),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 7),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe39),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IRP, 0),
	},
	{ /* QPI0 Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe32),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI0 Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe33),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 1),
	},
	{ /* QPI1 Port 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3a),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 2),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe34),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI0 Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe36),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI0 Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe37),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* R3QPI1 Link 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3e),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 2),
	},
	{ /* QPI Port 0 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter */	/* was mislabeled "Port 0": binds PORT1_FILTER */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* end: all zeroes */ }
};
1698 | ||
/* No .probe/.remove: the generic uncore PCI core drives these devices. */
static struct pci_driver ivbep_uncore_pci_driver = {
	.name		= "ivbep_uncore",
	.id_table	= ivbep_uncore_pci_ids,
};
1703 | ||
/*
 * Register IVT PCI uncore support.  Builds the PCI-bus -> physical-package
 * map using the UBOX device (0x0e1e), then publishes the type list and
 * driver for the generic uncore code.  Returns 0 or a negative errno.
 */
int ivbep_uncore_pci_init(void)
{
	int ret = snbep_pci2phy_map_init(0x0e1e);
	if (ret)
		return ret;
	uncore_pci_uncores = ivbep_pci_uncores;
	uncore_pci_driver = &ivbep_uncore_pci_driver;
	return 0;
}
1713 | /* end of IvyTown uncore support */ | |
e735b9db YZ |
1714 | |
1715 | /* Haswell-EP uncore support */ | |
/* sysfs "format" attributes for the HSW-EP UBOX (5-bit threshold,
 * tid/cid filter fields). */
static struct attribute *hswep_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	&format_attr_filter_tid2.attr,
	&format_attr_filter_cid.attr,
	NULL,
};

static struct attribute_group hswep_uncore_ubox_format_group = {
	.name = "format",
	.attrs = hswep_uncore_ubox_formats_attr,
};
1731 | ||
/* Route config1 bits into the single UBOX filter MSR (shared reg 0). */
static int hswep_ubox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	reg1->reg = HSWEP_U_MSR_PMON_FILTER;
	reg1->config = event->attr.config1 & HSWEP_U_MSR_PMON_BOX_FILTER_MASK;
	reg1->idx = 0;
	return 0;
}
1740 | ||
/* Common MSR ops plus UBOX filter handling/constraint management. */
static struct intel_uncore_ops hswep_uncore_ubox_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= hswep_ubox_hw_config,
	.get_constraint		= uncore_get_constraint,
	.put_constraint		= uncore_put_constraint,
};
1747 | ||
/* HSW-EP UBOX PMU: 2 generic counters + fixed UCLK counter, 1 shared
 * filter register. */
static struct intel_uncore_type hswep_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 44,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.num_shared_regs	= 1,
	.ops			= &hswep_uncore_ubox_ops,
	.format_group		= &hswep_uncore_ubox_format_group,
};
1763 | ||
/* sysfs "format" attributes for the HSW-EP CBOX, including the full
 * filter set (tid/link/state/nid/opc/nc/c6/isoc). */
static struct attribute *hswep_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_thresh8.attr,
	&format_attr_filter_tid3.attr,
	&format_attr_filter_link2.attr,
	&format_attr_filter_state3.attr,
	&format_attr_filter_nid2.attr,
	&format_attr_filter_opc2.attr,
	&format_attr_filter_nc.attr,
	&format_attr_filter_c6.attr,
	&format_attr_filter_isoc.attr,
	NULL,
};

static struct attribute_group hswep_uncore_cbox_format_group = {
	.name = "format",
	.attrs = hswep_uncore_cbox_formats_attr,
};
1785 | ||
/* CBOX events restricted to specific counters (bitmask of usable ctrs). */
static struct event_constraint hswep_uncore_cbox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x3e, 0x1),
	EVENT_CONSTRAINT_END
};
1796 | ||
/*
 * Map (event,umask) combinations to the filter fields they require;
 * the idx value is a bitmask of filter fields (see hswep_cbox_filter_mask).
 */
static struct extra_reg hswep_uncore_cbox_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
				  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4028, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4032, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4029, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4033, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x402A, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x12),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
	EVENT_EXTRA_END
};
1838 | ||
1839 | static u64 hswep_cbox_filter_mask(int fields) | |
1840 | { | |
1841 | u64 mask = 0; | |
1842 | if (fields & 0x1) | |
1843 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_TID; | |
1844 | if (fields & 0x2) | |
1845 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK; | |
1846 | if (fields & 0x4) | |
1847 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE; | |
1848 | if (fields & 0x8) | |
1849 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NID; | |
1850 | if (fields & 0x10) { | |
1851 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC; | |
1852 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NC; | |
1853 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_C6; | |
1854 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC; | |
1855 | } | |
1856 | return mask; | |
1857 | } | |
1858 | ||
/* Delegate to the shared CBOX constraint logic with the HSW-EP filter mask. */
static struct event_constraint *
hswep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, hswep_cbox_filter_mask);
}
1864 | ||
/*
 * Determine which filter fields this event needs by OR-ing the idx masks
 * of every matching extra_reg entry, then program the per-box filter
 * register address and the config1-derived filter value.
 */
static int hswep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct extra_reg *er;
	int idx = 0;

	for (er = hswep_uncore_cbox_extra_regs; er->msr; er++) {
		if (er->event != (event->hw.config & er->config_mask))
			continue;
		idx |= er->idx;
	}

	if (idx) {
		reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 +
			    HSWEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
		reg1->config = event->attr.config1 & hswep_cbox_filter_mask(idx);
		reg1->idx = idx;
	}
	return 0;
}
1885 | ||
/*
 * Program the (64-bit, split across two MSRs) filter before enabling the
 * event counter.  Filter first, then control: order matters so the event
 * never counts with a stale filter.
 */
static void hswep_cbox_enable_event(struct intel_uncore_box *box,
				    struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;

	if (reg1->idx != EXTRA_REG_NONE) {
		u64 filter = uncore_shared_reg_config(box, 0);
		wrmsrl(reg1->reg, filter & 0xffffffff);
		wrmsrl(reg1->reg + 1, filter >> 32);
	}

	wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
1900 | ||
/* CBOX ops: standard SNB-EP MSR box handling plus HSW-EP filter support. */
static struct intel_uncore_ops hswep_uncore_cbox_ops = {
	.init_box		= snbep_uncore_msr_init_box,
	.disable_box		= snbep_uncore_msr_disable_box,
	.enable_box		= snbep_uncore_msr_enable_box,
	.disable_event		= snbep_uncore_msr_disable_event,
	.enable_event		= hswep_cbox_enable_event,
	.read_counter		= uncore_msr_read_counter,
	.hw_config		= hswep_cbox_hw_config,
	.get_constraint		= hswep_cbox_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};
1912 | ||
/* HSW-EP CBOX (LLC slice) PMU; num_boxes is the hardware maximum and is
 * clamped to the actual core count in hswep_uncore_cpu_init(). */
static struct intel_uncore_type hswep_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 18,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_C0_MSR_PMON_CTR0,
	.event_mask		= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,
	.constraints		= hswep_uncore_cbox_constraints,
	.ops			= &hswep_uncore_cbox_ops,
	.format_group		= &hswep_uncore_cbox_format_group,
};
1928 | ||
68055915 AK |
/*
 * Write SBOX Initialization register bit by bit to avoid spurious #GPs
 *
 * Each wrmsrl sets one additional bit of SNBEP_PMON_BOX_CTL_INT, with
 * the already-set bits kept; writing the full value at once can #GP on
 * some parts.  The exact incremental ordering is intentional.
 */
static void hswep_uncore_sbox_msr_init_box(struct intel_uncore_box *box)
{
	unsigned msr = uncore_msr_box_ctl(box);

	if (msr) {
		u64 init = SNBEP_PMON_BOX_CTL_INT;
		u64 flags = 0;
		int i;

		for_each_set_bit(i, (unsigned long *)&init, 64) {
			flags |= (1ULL << i);
			wrmsrl(msr, flags);
		}
	}
}
1947 | ||
/* Common MSR ops, but with the #GP-safe SBOX init_box override. */
static struct intel_uncore_ops hswep_uncore_sbox_msr_ops = {
	__SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.init_box		= hswep_uncore_sbox_msr_init_box
};
1952 | ||
e735b9db YZ |
/* sysfs "format" attributes for the HSW-EP SBOX. */
static struct attribute *hswep_uncore_sbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};

static struct attribute_group hswep_uncore_sbox_format_group = {
	.name = "format",
	.attrs = hswep_uncore_sbox_formats_attr,
};
1967 | ||
/* HSW-EP SBOX (ring-to-QPI buffer) PMU; num_boxes may be reduced to 2 on
 * 6-8 core parts in hswep_uncore_cpu_init(). */
static struct intel_uncore_type hswep_uncore_sbox = {
	.name			= "sbox",
	.num_counters		= 4,
	.num_boxes		= 4,
	.perf_ctr_bits		= 44,
	.event_ctl		= HSWEP_S0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_S0_MSR_PMON_CTR0,
	.event_mask		= HSWEP_S_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_S0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_SBOX_MSR_OFFSET,
	.ops			= &hswep_uncore_sbox_msr_ops,
	.format_group		= &hswep_uncore_sbox_format_group,
};
1981 | ||
/*
 * PCU occupancy events 0xb-0xe use a per-event byte of the PCU filter
 * register; select the byte via idx and mask config1 accordingly.
 * NOTE(review): the mask is (0xff << idx), i.e. shifted by the index,
 * not idx*8 — matches the original code; confirm against the PCU spec.
 */
static int hswep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;

	if (ev_sel >= 0xb && ev_sel <= 0xe) {
		reg1->reg = HSWEP_PCU_MSR_PMON_BOX_FILTER;
		reg1->idx = ev_sel - 0xb;
		reg1->config = event->attr.config1 & (0xff << reg1->idx);
	}
	return 0;
}
1995 | ||
/* PCU ops: common MSR handling plus occupancy-filter configuration. */
static struct intel_uncore_ops hswep_uncore_pcu_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= hswep_pcu_hw_config,
	.get_constraint		= snbep_pcu_get_constraint,
	.put_constraint		= snbep_pcu_put_constraint,
};
2002 | ||
/* HSW-EP PCU (power control unit) PMU, one shared filter register. */
static struct intel_uncore_type hswep_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= HSWEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_PCU_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_PCU_MSR_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &hswep_uncore_pcu_ops,
	.format_group		= &snbep_uncore_pcu_format_group,
};
2016 | ||
/* NULL-terminated list of all HSW-EP MSR-based uncore PMU types. */
static struct intel_uncore_type *hswep_msr_uncores[] = {
	&hswep_uncore_ubox,
	&hswep_uncore_cbox,
	&hswep_uncore_sbox,
	&hswep_uncore_pcu,
	NULL,
};
2024 | ||
/*
 * Register HSW-EP MSR uncore support.  Clamps the CBOX count to the real
 * core count and shrinks the SBOX count on 6-8 core SKUs, detected via
 * the CAPID4 capability register (offset 0x94) of the PCU.3 PCI device
 * stashed in uncore_extra_pci_dev by the PCI probe.
 */
void hswep_uncore_cpu_init(void)
{
	if (hswep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
		hswep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;

	/* Detect 6-8 core systems with only two SBOXes */
	if (uncore_extra_pci_dev[0][HSWEP_PCI_PCU_3]) {
		u32 capid4;

		pci_read_config_dword(uncore_extra_pci_dev[0][HSWEP_PCI_PCU_3],
				      0x94, &capid4);
		if (((capid4 >> 6) & 0x3) == 0)
			hswep_uncore_sbox.num_boxes = 2;
	}

	uncore_msr_uncores = hswep_msr_uncores;
}
2042 | ||
/* HSW-EP Home Agent PMU: 2 boxes, 5 counters each. */
static struct intel_uncore_type hswep_uncore_ha = {
	.name		= "ha",
	.num_counters   = 5,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2050 | ||
/* Named IMC events; CAS counts scale by 64B/cacheline -> MiB
 * (64 / 2^20 = 6.103515625e-5). */
static struct uncore_event_desc hswep_uncore_imc_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks,      "event=0x00,umask=0x00"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read,  "event=0x04,umask=0x03"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
	{ /* end: all zeroes */ },
};
2061 | ||
/* HSW-EP IMC (memory channel) PMU: 8 channels, fixed DCLK counter. */
static struct intel_uncore_type hswep_uncore_imc = {
	.name		= "imc",
	.num_counters   = 5,
	.num_boxes	= 8,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= hswep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2073 | ||
41a134a5 AK |
2074 | static unsigned hswep_uncore_irp_ctrs[] = {0xa0, 0xa8, 0xb0, 0xb8}; |
2075 | ||
2076 | static u64 hswep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event) | |
2077 | { | |
2078 | struct pci_dev *pdev = box->pci_dev; | |
2079 | struct hw_perf_event *hwc = &event->hw; | |
2080 | u64 count = 0; | |
2081 | ||
2082 | pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx], (u32 *)&count); | |
2083 | pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1); | |
2084 | ||
2085 | return count; | |
2086 | } | |
2087 | ||
e735b9db YZ |
/* HSW-EP IRP reuses the IVT event enable/disable helpers (same control
 * offsets) but has its own counter layout, hence its own read_counter. */
static struct intel_uncore_ops hswep_uncore_irp_ops = {
	.init_box	= snbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= ivbep_uncore_irp_disable_event,
	.enable_event	= ivbep_uncore_irp_enable_event,
	.read_counter	= hswep_uncore_irp_read_counter,
};
2096 | ||
/* HSW-EP IRP PMU; counters addressed directly by the custom ops. */
static struct intel_uncore_type hswep_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &hswep_uncore_irp_ops,
	.format_group		= &snbep_uncore_format_group,
};
2107 | ||
/* HSW-EP QPI PMU: 3 link boxes; reuses the SNB-EP QPI ops and formats. */
static struct intel_uncore_type hswep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 5,
	.num_boxes		= 3,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &snbep_uncore_qpi_ops,
	.format_group		= &snbep_uncore_qpi_format_group,
};
2121 | ||
/* R2PCIe events restricted to specific counters (bitmask of usable ctrs). */
static struct event_constraint hswep_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x24, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x27, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2a, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
	EVENT_CONSTRAINT_END
};
2143 | ||
/* HSW-EP R2PCIe ring-to-PCIe agent PMU. */
static struct intel_uncore_type hswep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.constraints	= hswep_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2152 | ||
/* R3QPI events restricted to specific counters (bitmask of usable ctrs). */
static struct event_constraint hswep_uncore_r3qpi_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x07, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x08, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0a, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0e, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x14, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x15, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x1f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	EVENT_CONSTRAINT_END
};
2189 | ||
/* HSW-EP R3QPI ring-to-QPI agent PMU: 3 link boxes. */
static struct intel_uncore_type hswep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 4,
	.num_boxes	= 3,
	.perf_ctr_bits	= 44,
	.constraints	= hswep_uncore_r3qpi_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2198 | ||
/* Indices into hswep_pci_uncores[]; encoded into pci_device_id driver_data. */
enum {
	HSWEP_PCI_UNCORE_HA,
	HSWEP_PCI_UNCORE_IMC,
	HSWEP_PCI_UNCORE_IRP,
	HSWEP_PCI_UNCORE_QPI,
	HSWEP_PCI_UNCORE_R2PCIE,
	HSWEP_PCI_UNCORE_R3QPI,
};
2207 | ||
/* NULL-terminated list of all HSW-EP PCI-based uncore PMU types. */
static struct intel_uncore_type *hswep_pci_uncores[] = {
	[HSWEP_PCI_UNCORE_HA]	  = &hswep_uncore_ha,
	[HSWEP_PCI_UNCORE_IMC]	  = &hswep_uncore_imc,
	[HSWEP_PCI_UNCORE_IRP]	  = &hswep_uncore_irp,
	[HSWEP_PCI_UNCORE_QPI]	  = &hswep_uncore_qpi,
	[HSWEP_PCI_UNCORE_R2PCIE] = &hswep_uncore_r2pcie,
	[HSWEP_PCI_UNCORE_R3QPI]  = &hswep_uncore_r3qpi,
	NULL,
};
2217 | ||
/*
 * PCI device ID table for HSW-EP uncore boxes.  driver_data encodes the
 * uncore type index plus box index; UNCORE_EXTRA_PCI_DEV entries (QPI
 * filters, PCU.3) are stashed for later use rather than registered as PMUs.
 */
static const struct pci_device_id hswep_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f30),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 0),
	},
	{ /* Home Agent 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f38),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 1),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb0),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb1),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC0 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb4),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC0 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb5),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 3),
	},
	{ /* MC1 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd0),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 4),
	},
	{ /* MC1 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd1),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 5),
	},
	{ /* MC1 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd4),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 6),
	},
	{ /* MC1 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd5),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 7),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f39),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IRP, 0),
	},
	{ /* QPI0 Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f32),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI0 Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f33),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 1),
	},
	{ /* QPI1 Port 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3a),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 2),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f34),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI0 Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f36),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI0 Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f37),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* R3QPI1 Link 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3e),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 2),
	},
	{ /* QPI Port 0 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* PCU.3 (for Capability registers) */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fc0),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   HSWEP_PCI_PCU_3),
	},
	{ /* end: all zeroes */ }
};
2308 | ||
/*
 * PCI driver for the Haswell-EP uncore PMON devices.  Only the id table
 * is filled in here; probe/remove are presumably supplied by the common
 * uncore PCI code once this is published via uncore_pci_driver in
 * hswep_uncore_pci_init() — confirm against the uncore core.
 */
static struct pci_driver hswep_uncore_pci_driver = {
	.name		= "hswep_uncore",
	.id_table	= hswep_uncore_pci_ids,
};
2313 | ||
2314 | int hswep_uncore_pci_init(void) | |
2315 | { | |
2316 | int ret = snbep_pci2phy_map_init(0x2f1e); | |
2317 | if (ret) | |
2318 | return ret; | |
2319 | uncore_pci_uncores = hswep_pci_uncores; | |
2320 | uncore_pci_driver = &hswep_uncore_pci_driver; | |
2321 | return 0; | |
2322 | } | |
2323 | /* end of Haswell-EP uncore support */ | |
070e9887 KL |
2324 | |
2325 | /* BDX-DE uncore support */ | |
2326 | ||
/*
 * BDX-DE Ubox PMU: one box with two 48-bit general-purpose counters and
 * a 48-bit fixed UCLK counter.  Register addresses reuse the HSW-EP
 * Ubox MSR layout (HSWEP_U_MSR_*); MSR access ops and the format group
 * are taken from the IVB-EP implementation.
 */
static struct intel_uncore_type bdx_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.num_shared_regs	= 1,
	.ops			= &ivbep_uncore_msr_ops,
	.format_group		= &ivbep_uncore_ubox_format_group,
};
2342 | ||
/*
 * BDX-DE C-Box scheduling constraints: (event code, allowed-counter
 * bitmask).  Per the usual event_constraint convention, 0x3 restricts
 * the event to counters 0-1 and 0x1 to counter 0 only.
 */
static struct event_constraint bdx_uncore_cbox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	EVENT_CONSTRAINT_END
};
2349 | ||
/*
 * BDX-DE C-Box (LLC coherency engine) PMU: up to 8 boxes of four 48-bit
 * counters each.  num_boxes is clamped to the actual core count in
 * bdx_uncore_cpu_init().  Registers, ops and formats are shared with
 * HSW-EP (HSWEP_C0_MSR_* / hswep_uncore_cbox_*).
 */
static struct intel_uncore_type bdx_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 8,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_C0_MSR_PMON_CTR0,
	.event_mask		= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,
	.constraints		= bdx_uncore_cbox_constraints,
	.ops			= &hswep_uncore_cbox_ops,
	.format_group		= &hswep_uncore_cbox_format_group,
};
2365 | ||
/* MSR-based BDX-DE uncore PMUs; NULL-terminated for the uncore core. */
static struct intel_uncore_type *bdx_msr_uncores[] = {
	&bdx_uncore_ubox,
	&bdx_uncore_cbox,
	&hswep_uncore_pcu,	/* PCU reused unchanged from HSW-EP */
	NULL,
};
2372 | ||
2373 | void bdx_uncore_cpu_init(void) | |
2374 | { | |
2375 | if (bdx_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) | |
2376 | bdx_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; | |
2377 | uncore_msr_uncores = bdx_msr_uncores; | |
2378 | } | |
2379 | ||
/*
 * BDX-DE Home Agent PMU: one box, four 48-bit counters, using the
 * common SNB-EP PCI PMON register layout.
 */
static struct intel_uncore_type bdx_uncore_ha = {
	.name		= "ha",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2387 | ||
/*
 * BDX-DE integrated memory controller PMU: two channels (boxes), five
 * 48-bit counters plus the fixed DCLK counter; event descriptions are
 * shared with HSW-EP, registers use the common SNB-EP PCI layout.
 */
static struct intel_uncore_type bdx_uncore_imc = {
	.name		= "imc",
	.num_counters   = 5,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= hswep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2399 | ||
/*
 * BDX-DE IRP (IIO coherency ring port) PMU.  Does not use the common
 * PCI init macro: the IRP counters need the special read/write ops from
 * HSW-EP (hswep_uncore_irp_ops) rather than the default PCI accessors.
 */
static struct intel_uncore_type bdx_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &hswep_uncore_irp_ops,
	.format_group		= &snbep_uncore_format_group,
};
2410 | ||
2411 | ||
/*
 * BDX-DE R2PCIe scheduling constraints: (event code, allowed-counter
 * bitmask); 0x3 restricts the event to counters 0-1, 0x1 to counter 0.
 */
static struct event_constraint bdx_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	EVENT_CONSTRAINT_END
};
2422 | ||
/*
 * BDX-DE R2PCIe (ring-to-PCIe interface) PMU: one box, four 48-bit
 * counters, common SNB-EP PCI register layout plus the constraint
 * table above.
 */
static struct intel_uncore_type bdx_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.constraints	= bdx_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2431 | ||
/*
 * Indices into bdx_pci_uncores[]; also encoded into driver_data of the
 * PCI id table entries via UNCORE_PCI_DEV_DATA().
 */
enum {
	BDX_PCI_UNCORE_HA,
	BDX_PCI_UNCORE_IMC,
	BDX_PCI_UNCORE_IRP,
	BDX_PCI_UNCORE_R2PCIE,
};
2438 | ||
/* PCI-based BDX-DE uncore PMUs, indexed by the BDX_PCI_UNCORE_* enum. */
static struct intel_uncore_type *bdx_pci_uncores[] = {
	[BDX_PCI_UNCORE_HA]	= &bdx_uncore_ha,
	[BDX_PCI_UNCORE_IMC]	= &bdx_uncore_imc,
	[BDX_PCI_UNCORE_IRP]	= &bdx_uncore_irp,
	[BDX_PCI_UNCORE_R2PCIE]	= &bdx_uncore_r2pcie,
	NULL,
};
2446 | ||
2447 | static DEFINE_PCI_DEVICE_TABLE(bdx_uncore_pci_ids) = { | |
2448 | { /* Home Agent 0 */ | |
2449 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f30), | |
2450 | .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_HA, 0), | |
2451 | }, | |
2452 | { /* MC0 Channel 0 */ | |
2453 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb0), | |
2454 | .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 0), | |
2455 | }, | |
2456 | { /* MC0 Channel 1 */ | |
2457 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb1), | |
2458 | .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 1), | |
2459 | }, | |
2460 | { /* IRP */ | |
2461 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f39), | |
2462 | .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IRP, 0), | |
2463 | }, | |
2464 | { /* R2PCIe */ | |
2465 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f34), | |
2466 | .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R2PCIE, 0), | |
2467 | }, | |
2468 | { /* end: all zeroes */ } | |
2469 | }; | |
2470 | ||
/*
 * PCI driver for the BDX-DE uncore PMON devices.  Only the id table is
 * filled in here; probe/remove are presumably supplied by the common
 * uncore PCI code once this is published via uncore_pci_driver in
 * bdx_uncore_pci_init() — confirm against the uncore core.
 */
static struct pci_driver bdx_uncore_pci_driver = {
	.name		= "bdx_uncore",
	.id_table	= bdx_uncore_pci_ids,
};
2475 | ||
/*
 * Register BDX-DE uncore PCI support.
 *
 * Builds the PCI bus to physical id mapping via device 0x6f1e, then
 * publishes the BDX-DE uncore type table and PCI driver stub to the
 * generic uncore core.
 *
 * Returns 0 on success, or the error from snbep_pci2phy_map_init().
 */
int bdx_uncore_pci_init(void)
{
	int ret = snbep_pci2phy_map_init(0x6f1e);

	if (ret)
		return ret;
	uncore_pci_uncores = bdx_pci_uncores;
	uncore_pci_driver = &bdx_uncore_pci_driver;
	return 0;
}
2486 | ||
2487 | /* end of BDX-DE uncore support */ |