/* SandyBridge-EP/IvyTown uncore support */

/* SNB-EP Box level control */
#define SNBEP_PMON_BOX_CTL_RST_CTRL	(1 << 0)
#define SNBEP_PMON_BOX_CTL_RST_CTRS	(1 << 1)
#define SNBEP_PMON_BOX_CTL_FRZ		(1 << 8)
#define SNBEP_PMON_BOX_CTL_FRZ_EN	(1 << 16)
/* Init value: reset control+counters and enable the freeze bit */
#define SNBEP_PMON_BOX_CTL_INT		(SNBEP_PMON_BOX_CTL_RST_CTRL | \
					 SNBEP_PMON_BOX_CTL_RST_CTRS | \
					 SNBEP_PMON_BOX_CTL_FRZ_EN)
/* SNB-EP event control */
#define SNBEP_PMON_CTL_EV_SEL_MASK	0x000000ff
#define SNBEP_PMON_CTL_UMASK_MASK	0x0000ff00
#define SNBEP_PMON_CTL_RST		(1 << 17)
#define SNBEP_PMON_CTL_EDGE_DET		(1 << 18)
#define SNBEP_PMON_CTL_EV_SEL_EXT	(1 << 21)
#define SNBEP_PMON_CTL_EN		(1 << 22)
#define SNBEP_PMON_CTL_INVERT		(1 << 23)
#define SNBEP_PMON_CTL_TRESH_MASK	0xff000000
/* Bits a raw event may set in the generic control register */
#define SNBEP_PMON_RAW_EVENT_MASK	(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_INVERT | \
					 SNBEP_PMON_CTL_TRESH_MASK)
/* SNB-EP Ubox event control */
#define SNBEP_U_MSR_PMON_CTL_TRESH_MASK		0x1f000000	/* Ubox threshold is only 5 bits */
#define SNBEP_U_MSR_PMON_RAW_EVENT_MASK		\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

/* SNB-EP Cbo event control: Cbo adds a TID-enable bit */
#define SNBEP_CBO_PMON_CTL_TID_EN		(1 << 19)
#define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK	(SNBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)
/* SNB-EP PCU event control */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK	0x0000c000
#define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK	0x1f000000
#define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT	(1 << 30)
#define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET	(1 << 31)
#define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

/* SNB-EP QPI event control: event select has an extension bit */
#define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)
/* SNB-EP pci control register */
#define SNBEP_PCI_PMON_BOX_CTL			0xf4
#define SNBEP_PCI_PMON_CTL0			0xd8
/* SNB-EP pci counter register */
#define SNBEP_PCI_PMON_CTR0			0xa0

/* SNB-EP home agent register */
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0	0x40
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1	0x44
#define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH	0x48
/* SNB-EP memory controller register */
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL		0xf0
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR		0xd0
/* SNB-EP QPI register */
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0		0x228
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1		0x22c
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK0		0x238
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK1		0x23c
/* SNB-EP Ubox register */
#define SNBEP_U_MSR_PMON_CTR0			0xc16
#define SNBEP_U_MSR_PMON_CTL0			0xc10

#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL		0xc08
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR		0xc09

/* SNB-EP Cbo register */
#define SNBEP_C0_MSR_PMON_CTR0			0xd16
#define SNBEP_C0_MSR_PMON_CTL0			0xd10
#define SNBEP_C0_MSR_PMON_BOX_CTL		0xd04
#define SNBEP_C0_MSR_PMON_BOX_FILTER		0xd14
#define SNBEP_CBO_MSR_OFFSET			0x20	/* stride between Cbo MSR banks */

/* Cbo filter register fields */
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_TID	0x1f
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_NID	0x3fc00
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE	0x7c0000
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC	0xff800000

/*
 * Extra-reg descriptor tying an event encoding to the Cbo filter MSR.
 * NOTE(review): the .config_mask/.idx tail lines were reconstructed from
 * the upstream definition — confirm against the original file.
 */
#define SNBEP_CBO_EVENT_EXTRA_REG(e, m, i) {	\
	.event = (e),				\
	.msr = SNBEP_C0_MSR_PMON_BOX_FILTER,	\
	.config_mask = (m),			\
	.idx = (i)				\
}

/* SNB-EP PCU register */
#define SNBEP_PCU_MSR_PMON_CTR0			0xc36
#define SNBEP_PCU_MSR_PMON_CTL0			0xc30
#define SNBEP_PCU_MSR_PMON_BOX_CTL		0xc24
#define SNBEP_PCU_MSR_PMON_BOX_FILTER		0xc34
#define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK	0xffffffff
#define SNBEP_PCU_MSR_CORE_C3_CTR		0x3fc
#define SNBEP_PCU_MSR_CORE_C6_CTR		0x3fd
/* IVBEP event control */
#define IVBEP_PMON_BOX_CTL_INT		(SNBEP_PMON_BOX_CTL_RST_CTRL | \
					 SNBEP_PMON_BOX_CTL_RST_CTRS)
#define IVBEP_PMON_RAW_EVENT_MASK	(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_TRESH_MASK)
/* IVBEP Ubox global control */
#define IVBEP_U_MSR_PMON_GLOBAL_CTL		0xc00
#define IVBEP_U_PMON_GLOBAL_FRZ_ALL		(1 << 31)
#define IVBEP_U_PMON_GLOBAL_UNFRZ_ALL		(1 << 29)

/* IVBEP Ubox event control */
#define IVBEP_U_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

/* IVBEP Cbo */
#define IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK	(IVBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* IVBEP Cbo filter fields (64-bit filter register layout) */
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_TID	(0x1fULL << 0)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 5)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE	(0x3fULL << 17)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NID	(0xffffULL << 32)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC	(0x1ffULL << 52)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_C6	(0x1ULL << 61)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NC	(0x1ULL << 62)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC	(0x1ULL << 63)

/* IVBEP home agent */
#define IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST		(1 << 16)
#define IVBEP_HA_PCI_PMON_RAW_EVENT_MASK	\
				(IVBEP_PMON_RAW_EVENT_MASK | \
				 IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST)
/* IVBEP PCU */
#define IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
/* IVBEP QPI */
#define IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK	\
				(IVBEP_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)
160 #define __BITS_VALUE(x, i, n) ((typeof(x))(((x) >> ((i) * (n))) & \
161 ((1ULL << (n)) - 1)))
/* Haswell-EP Ubox */
#define HSWEP_U_MSR_PMON_CTR0			0x709
#define HSWEP_U_MSR_PMON_CTL0			0x705
#define HSWEP_U_MSR_PMON_FILTER			0x707

#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTL		0x703
#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTR		0x704

#define HSWEP_U_MSR_PMON_BOX_FILTER_TID		(0x1 << 0)
#define HSWEP_U_MSR_PMON_BOX_FILTER_CID		(0x1fULL << 1)
#define HSWEP_U_MSR_PMON_BOX_FILTER_MASK \
					(HSWEP_U_MSR_PMON_BOX_FILTER_TID | \
					 HSWEP_U_MSR_PMON_BOX_FILTER_CID)

/* Haswell-EP Cbo */
#define HSWEP_C0_MSR_PMON_CTR0			0xe08
#define HSWEP_C0_MSR_PMON_CTL0			0xe01
#define HSWEP_C0_MSR_PMON_BOX_CTL		0xe00
#define HSWEP_C0_MSR_PMON_BOX_FILTER0		0xe05
#define HSWEP_CBO_MSR_OFFSET			0x10

/* Haswell-EP Cbo filter fields */
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_TID	(0x3fULL << 0)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 6)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE	(0x7fULL << 17)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NID	(0xffffULL << 32)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC	(0x1ffULL << 52)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_C6	(0x1ULL << 61)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NC	(0x1ULL << 62)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC	(0x1ULL << 63)
/* Haswell-EP Sbox */
#define HSWEP_S0_MSR_PMON_CTR0			0x726
#define HSWEP_S0_MSR_PMON_CTL0			0x721
#define HSWEP_S0_MSR_PMON_BOX_CTL		0x720
#define HSWEP_SBOX_MSR_OFFSET			0xa
#define HSWEP_S_MSR_PMON_RAW_EVENT_MASK		(SNBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* Haswell-EP PCU */
#define HSWEP_PCU_MSR_PMON_CTR0			0x717
#define HSWEP_PCU_MSR_PMON_CTL0			0x711
#define HSWEP_PCU_MSR_PMON_BOX_CTL		0x710
#define HSWEP_PCU_MSR_PMON_BOX_FILTER		0x715
/* KNL Ubox */
#define KNL_U_MSR_PMON_RAW_EVENT_MASK \
					(SNBEP_U_MSR_PMON_RAW_EVENT_MASK | \
					 SNBEP_CBO_PMON_CTL_TID_EN)
/* KNL CHA */
#define KNL_CHA_MSR_OFFSET			0xc
#define KNL_CHA_MSR_PMON_CTL_QOR		(1 << 16)
#define KNL_CHA_MSR_PMON_RAW_EVENT_MASK \
					(SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK | \
					 KNL_CHA_MSR_PMON_CTL_QOR)
#define KNL_CHA_MSR_PMON_BOX_FILTER_TID		0x1ff
#define KNL_CHA_MSR_PMON_BOX_FILTER_STATE	(7 << 18)
#define KNL_CHA_MSR_PMON_BOX_FILTER_OP		(0xfffffe2aULL << 32)

/* KNL EDC/MC UCLK */
#define KNL_UCLK_MSR_PMON_CTR0_LOW		0x400
#define KNL_UCLK_MSR_PMON_CTL0			0x420
#define KNL_UCLK_MSR_PMON_BOX_CTL		0x430
#define KNL_UCLK_MSR_PMON_UCLK_FIXED_LOW	0x44c
#define KNL_UCLK_MSR_PMON_UCLK_FIXED_CTL	0x454
#define KNL_PMON_FIXED_CTL_EN			0x1

/* KNL EDC */
#define KNL_EDC0_ECLK_MSR_PMON_CTR0_LOW		0xa00
#define KNL_EDC0_ECLK_MSR_PMON_CTL0		0xa20
#define KNL_EDC0_ECLK_MSR_PMON_BOX_CTL		0xa30
#define KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_LOW	0xa3c
#define KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_CTL	0xa44

/* KNL MC */
#define KNL_MC0_CH0_MSR_PMON_CTR0_LOW		0xb00
#define KNL_MC0_CH0_MSR_PMON_CTL0		0xb20
#define KNL_MC0_CH0_MSR_PMON_BOX_CTL		0xb30
#define KNL_MC0_CH0_MSR_PMON_FIXED_LOW		0xb3c
#define KNL_MC0_CH0_MSR_PMON_FIXED_CTL		0xb44

/* KNL IRP */
#define KNL_IRP_PCI_PMON_BOX_CTL		0xf0
#define KNL_IRP_PCI_PMON_RAW_EVENT_MASK		(SNBEP_PMON_RAW_EVENT_MASK | \
						 KNL_CHA_MSR_PMON_CTL_QOR)
/* KNL PCU */
#define KNL_PCU_PMON_CTL_EV_SEL_MASK		0x0000007f
#define KNL_PCU_PMON_CTL_USE_OCC_CTR		(1 << 7)
#define KNL_PCU_MSR_PMON_CTL_TRESH_MASK		0x3f000000
#define KNL_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(KNL_PCU_PMON_CTL_EV_SEL_MASK | \
				 KNL_PCU_PMON_CTL_USE_OCC_CTR | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_CBO_PMON_CTL_TID_EN | \
				 SNBEP_PMON_CTL_INVERT | \
				 KNL_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
264 DEFINE_UNCORE_FORMAT_ATTR(event, event, "config:0-7");
265 DEFINE_UNCORE_FORMAT_ATTR(event2, event, "config:0-6");
266 DEFINE_UNCORE_FORMAT_ATTR(event_ext, event, "config:0-7,21");
267 DEFINE_UNCORE_FORMAT_ATTR(use_occ_ctr, use_occ_ctr, "config:7");
268 DEFINE_UNCORE_FORMAT_ATTR(umask, umask, "config:8-15");
269 DEFINE_UNCORE_FORMAT_ATTR(qor, qor, "config:16");
270 DEFINE_UNCORE_FORMAT_ATTR(edge, edge, "config:18");
271 DEFINE_UNCORE_FORMAT_ATTR(tid_en, tid_en, "config:19");
272 DEFINE_UNCORE_FORMAT_ATTR(inv, inv, "config:23");
273 DEFINE_UNCORE_FORMAT_ATTR(thresh8, thresh, "config:24-31");
274 DEFINE_UNCORE_FORMAT_ATTR(thresh6, thresh, "config:24-29");
275 DEFINE_UNCORE_FORMAT_ATTR(thresh5, thresh, "config:24-28");
276 DEFINE_UNCORE_FORMAT_ATTR(occ_sel, occ_sel, "config:14-15");
277 DEFINE_UNCORE_FORMAT_ATTR(occ_invert, occ_invert, "config:30");
278 DEFINE_UNCORE_FORMAT_ATTR(occ_edge, occ_edge, "config:14-51");
279 DEFINE_UNCORE_FORMAT_ATTR(occ_edge_det, occ_edge_det, "config:31");
280 DEFINE_UNCORE_FORMAT_ATTR(filter_tid, filter_tid, "config1:0-4");
281 DEFINE_UNCORE_FORMAT_ATTR(filter_tid2, filter_tid, "config1:0");
282 DEFINE_UNCORE_FORMAT_ATTR(filter_tid3, filter_tid, "config1:0-5");
283 DEFINE_UNCORE_FORMAT_ATTR(filter_tid4, filter_tid, "config1:0-8");
284 DEFINE_UNCORE_FORMAT_ATTR(filter_cid, filter_cid, "config1:5");
285 DEFINE_UNCORE_FORMAT_ATTR(filter_link, filter_link, "config1:5-8");
286 DEFINE_UNCORE_FORMAT_ATTR(filter_link2, filter_link, "config1:6-8");
287 DEFINE_UNCORE_FORMAT_ATTR(filter_link3, filter_link, "config1:12");
288 DEFINE_UNCORE_FORMAT_ATTR(filter_nid, filter_nid, "config1:10-17");
289 DEFINE_UNCORE_FORMAT_ATTR(filter_nid2, filter_nid, "config1:32-47");
290 DEFINE_UNCORE_FORMAT_ATTR(filter_state, filter_state, "config1:18-22");
291 DEFINE_UNCORE_FORMAT_ATTR(filter_state2, filter_state, "config1:17-22");
292 DEFINE_UNCORE_FORMAT_ATTR(filter_state3, filter_state, "config1:17-23");
293 DEFINE_UNCORE_FORMAT_ATTR(filter_state4, filter_state, "config1:18-20");
294 DEFINE_UNCORE_FORMAT_ATTR(filter_local, filter_local, "config1:33");
295 DEFINE_UNCORE_FORMAT_ATTR(filter_all_op, filter_all_op, "config1:35");
296 DEFINE_UNCORE_FORMAT_ATTR(filter_nnm, filter_nnm, "config1:37");
297 DEFINE_UNCORE_FORMAT_ATTR(filter_opc, filter_opc, "config1:23-31");
298 DEFINE_UNCORE_FORMAT_ATTR(filter_opc2, filter_opc, "config1:52-60");
299 DEFINE_UNCORE_FORMAT_ATTR(filter_opc3, filter_opc, "config1:41-60");
300 DEFINE_UNCORE_FORMAT_ATTR(filter_nc, filter_nc, "config1:62");
301 DEFINE_UNCORE_FORMAT_ATTR(filter_c6, filter_c6, "config1:61");
302 DEFINE_UNCORE_FORMAT_ATTR(filter_isoc, filter_isoc, "config1:63");
303 DEFINE_UNCORE_FORMAT_ATTR(filter_band0, filter_band0, "config1:0-7");
304 DEFINE_UNCORE_FORMAT_ATTR(filter_band1, filter_band1, "config1:8-15");
305 DEFINE_UNCORE_FORMAT_ATTR(filter_band2, filter_band2, "config1:16-23");
306 DEFINE_UNCORE_FORMAT_ATTR(filter_band3, filter_band3, "config1:24-31");
307 DEFINE_UNCORE_FORMAT_ATTR(match_rds, match_rds, "config1:48-51");
308 DEFINE_UNCORE_FORMAT_ATTR(match_rnid30, match_rnid30, "config1:32-35");
309 DEFINE_UNCORE_FORMAT_ATTR(match_rnid4, match_rnid4, "config1:31");
310 DEFINE_UNCORE_FORMAT_ATTR(match_dnid, match_dnid, "config1:13-17");
311 DEFINE_UNCORE_FORMAT_ATTR(match_mc, match_mc, "config1:9-12");
312 DEFINE_UNCORE_FORMAT_ATTR(match_opc, match_opc, "config1:5-8");
313 DEFINE_UNCORE_FORMAT_ATTR(match_vnw, match_vnw, "config1:3-4");
314 DEFINE_UNCORE_FORMAT_ATTR(match0, match0, "config1:0-31");
315 DEFINE_UNCORE_FORMAT_ATTR(match1, match1, "config1:32-63");
316 DEFINE_UNCORE_FORMAT_ATTR(mask_rds, mask_rds, "config2:48-51");
317 DEFINE_UNCORE_FORMAT_ATTR(mask_rnid30, mask_rnid30, "config2:32-35");
318 DEFINE_UNCORE_FORMAT_ATTR(mask_rnid4, mask_rnid4, "config2:31");
319 DEFINE_UNCORE_FORMAT_ATTR(mask_dnid, mask_dnid, "config2:13-17");
320 DEFINE_UNCORE_FORMAT_ATTR(mask_mc, mask_mc, "config2:9-12");
321 DEFINE_UNCORE_FORMAT_ATTR(mask_opc, mask_opc, "config2:5-8");
322 DEFINE_UNCORE_FORMAT_ATTR(mask_vnw, mask_vnw, "config2:3-4");
323 DEFINE_UNCORE_FORMAT_ATTR(mask0, mask0, "config2:0-31");
324 DEFINE_UNCORE_FORMAT_ATTR(mask1, mask1, "config2:32-63");
326 static void snbep_uncore_pci_disable_box(struct intel_uncore_box *box)
328 struct pci_dev *pdev = box->pci_dev;
329 int box_ctl = uncore_pci_box_ctl(box);
332 if (!pci_read_config_dword(pdev, box_ctl, &config)) {
333 config |= SNBEP_PMON_BOX_CTL_FRZ;
334 pci_write_config_dword(pdev, box_ctl, config);
338 static void snbep_uncore_pci_enable_box(struct intel_uncore_box *box)
340 struct pci_dev *pdev = box->pci_dev;
341 int box_ctl = uncore_pci_box_ctl(box);
344 if (!pci_read_config_dword(pdev, box_ctl, &config)) {
345 config &= ~SNBEP_PMON_BOX_CTL_FRZ;
346 pci_write_config_dword(pdev, box_ctl, config);
350 static void snbep_uncore_pci_enable_event(struct intel_uncore_box *box, struct perf_event *event)
352 struct pci_dev *pdev = box->pci_dev;
353 struct hw_perf_event *hwc = &event->hw;
355 pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
358 static void snbep_uncore_pci_disable_event(struct intel_uncore_box *box, struct perf_event *event)
360 struct pci_dev *pdev = box->pci_dev;
361 struct hw_perf_event *hwc = &event->hw;
363 pci_write_config_dword(pdev, hwc->config_base, hwc->config);
366 static u64 snbep_uncore_pci_read_counter(struct intel_uncore_box *box, struct perf_event *event)
368 struct pci_dev *pdev = box->pci_dev;
369 struct hw_perf_event *hwc = &event->hw;
372 pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count);
373 pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1);
378 static void snbep_uncore_pci_init_box(struct intel_uncore_box *box)
380 struct pci_dev *pdev = box->pci_dev;
381 int box_ctl = uncore_pci_box_ctl(box);
383 pci_write_config_dword(pdev, box_ctl, SNBEP_PMON_BOX_CTL_INT);
386 static void snbep_uncore_msr_disable_box(struct intel_uncore_box *box)
391 msr = uncore_msr_box_ctl(box);
394 config |= SNBEP_PMON_BOX_CTL_FRZ;
399 static void snbep_uncore_msr_enable_box(struct intel_uncore_box *box)
404 msr = uncore_msr_box_ctl(box);
407 config &= ~SNBEP_PMON_BOX_CTL_FRZ;
412 static void snbep_uncore_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
414 struct hw_perf_event *hwc = &event->hw;
415 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
417 if (reg1->idx != EXTRA_REG_NONE)
418 wrmsrl(reg1->reg, uncore_shared_reg_config(box, 0));
420 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
423 static void snbep_uncore_msr_disable_event(struct intel_uncore_box *box,
424 struct perf_event *event)
426 struct hw_perf_event *hwc = &event->hw;
428 wrmsrl(hwc->config_base, hwc->config);
431 static void snbep_uncore_msr_init_box(struct intel_uncore_box *box)
433 unsigned msr = uncore_msr_box_ctl(box);
436 wrmsrl(msr, SNBEP_PMON_BOX_CTL_INT);
439 static struct attribute *snbep_uncore_formats_attr[] = {
440 &format_attr_event.attr,
441 &format_attr_umask.attr,
442 &format_attr_edge.attr,
443 &format_attr_inv.attr,
444 &format_attr_thresh8.attr,
448 static struct attribute *snbep_uncore_ubox_formats_attr[] = {
449 &format_attr_event.attr,
450 &format_attr_umask.attr,
451 &format_attr_edge.attr,
452 &format_attr_inv.attr,
453 &format_attr_thresh5.attr,
457 static struct attribute *snbep_uncore_cbox_formats_attr[] = {
458 &format_attr_event.attr,
459 &format_attr_umask.attr,
460 &format_attr_edge.attr,
461 &format_attr_tid_en.attr,
462 &format_attr_inv.attr,
463 &format_attr_thresh8.attr,
464 &format_attr_filter_tid.attr,
465 &format_attr_filter_nid.attr,
466 &format_attr_filter_state.attr,
467 &format_attr_filter_opc.attr,
471 static struct attribute *snbep_uncore_pcu_formats_attr[] = {
472 &format_attr_event.attr,
473 &format_attr_occ_sel.attr,
474 &format_attr_edge.attr,
475 &format_attr_inv.attr,
476 &format_attr_thresh5.attr,
477 &format_attr_occ_invert.attr,
478 &format_attr_occ_edge.attr,
479 &format_attr_filter_band0.attr,
480 &format_attr_filter_band1.attr,
481 &format_attr_filter_band2.attr,
482 &format_attr_filter_band3.attr,
486 static struct attribute *snbep_uncore_qpi_formats_attr[] = {
487 &format_attr_event_ext.attr,
488 &format_attr_umask.attr,
489 &format_attr_edge.attr,
490 &format_attr_inv.attr,
491 &format_attr_thresh8.attr,
492 &format_attr_match_rds.attr,
493 &format_attr_match_rnid30.attr,
494 &format_attr_match_rnid4.attr,
495 &format_attr_match_dnid.attr,
496 &format_attr_match_mc.attr,
497 &format_attr_match_opc.attr,
498 &format_attr_match_vnw.attr,
499 &format_attr_match0.attr,
500 &format_attr_match1.attr,
501 &format_attr_mask_rds.attr,
502 &format_attr_mask_rnid30.attr,
503 &format_attr_mask_rnid4.attr,
504 &format_attr_mask_dnid.attr,
505 &format_attr_mask_mc.attr,
506 &format_attr_mask_opc.attr,
507 &format_attr_mask_vnw.attr,
508 &format_attr_mask0.attr,
509 &format_attr_mask1.attr,
513 static struct uncore_event_desc snbep_uncore_imc_events[] = {
514 INTEL_UNCORE_EVENT_DESC(clockticks, "event=0xff,umask=0x00"),
515 INTEL_UNCORE_EVENT_DESC(cas_count_read, "event=0x04,umask=0x03"),
516 INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
517 INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
518 INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
519 INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
520 INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
521 { /* end: all zeroes */ },
524 static struct uncore_event_desc snbep_uncore_qpi_events[] = {
525 INTEL_UNCORE_EVENT_DESC(clockticks, "event=0x14"),
526 INTEL_UNCORE_EVENT_DESC(txl_flits_active, "event=0x00,umask=0x06"),
527 INTEL_UNCORE_EVENT_DESC(drs_data, "event=0x102,umask=0x08"),
528 INTEL_UNCORE_EVENT_DESC(ncb_data, "event=0x103,umask=0x04"),
529 { /* end: all zeroes */ },
532 static struct attribute_group snbep_uncore_format_group = {
534 .attrs = snbep_uncore_formats_attr,
537 static struct attribute_group snbep_uncore_ubox_format_group = {
539 .attrs = snbep_uncore_ubox_formats_attr,
542 static struct attribute_group snbep_uncore_cbox_format_group = {
544 .attrs = snbep_uncore_cbox_formats_attr,
547 static struct attribute_group snbep_uncore_pcu_format_group = {
549 .attrs = snbep_uncore_pcu_formats_attr,
552 static struct attribute_group snbep_uncore_qpi_format_group = {
554 .attrs = snbep_uncore_qpi_formats_attr,
557 #define __SNBEP_UNCORE_MSR_OPS_COMMON_INIT() \
558 .disable_box = snbep_uncore_msr_disable_box, \
559 .enable_box = snbep_uncore_msr_enable_box, \
560 .disable_event = snbep_uncore_msr_disable_event, \
561 .enable_event = snbep_uncore_msr_enable_event, \
562 .read_counter = uncore_msr_read_counter
564 #define SNBEP_UNCORE_MSR_OPS_COMMON_INIT() \
565 __SNBEP_UNCORE_MSR_OPS_COMMON_INIT(), \
566 .init_box = snbep_uncore_msr_init_box \
568 static struct intel_uncore_ops snbep_uncore_msr_ops = {
569 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
572 #define SNBEP_UNCORE_PCI_OPS_COMMON_INIT() \
573 .init_box = snbep_uncore_pci_init_box, \
574 .disable_box = snbep_uncore_pci_disable_box, \
575 .enable_box = snbep_uncore_pci_enable_box, \
576 .disable_event = snbep_uncore_pci_disable_event, \
577 .read_counter = snbep_uncore_pci_read_counter
579 static struct intel_uncore_ops snbep_uncore_pci_ops = {
580 SNBEP_UNCORE_PCI_OPS_COMMON_INIT(),
581 .enable_event = snbep_uncore_pci_enable_event, \
584 static struct event_constraint snbep_uncore_cbox_constraints[] = {
585 UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
586 UNCORE_EVENT_CONSTRAINT(0x02, 0x3),
587 UNCORE_EVENT_CONSTRAINT(0x04, 0x3),
588 UNCORE_EVENT_CONSTRAINT(0x05, 0x3),
589 UNCORE_EVENT_CONSTRAINT(0x07, 0x3),
590 UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
591 UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
592 UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
593 UNCORE_EVENT_CONSTRAINT(0x13, 0x3),
594 UNCORE_EVENT_CONSTRAINT(0x1b, 0xc),
595 UNCORE_EVENT_CONSTRAINT(0x1c, 0xc),
596 UNCORE_EVENT_CONSTRAINT(0x1d, 0xc),
597 UNCORE_EVENT_CONSTRAINT(0x1e, 0xc),
598 EVENT_CONSTRAINT_OVERLAP(0x1f, 0xe, 0xff),
599 UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
600 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
601 UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
602 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
603 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
604 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
605 UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
606 UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
607 UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
608 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
609 UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
610 UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
614 static struct event_constraint snbep_uncore_r2pcie_constraints[] = {
615 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
616 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
617 UNCORE_EVENT_CONSTRAINT(0x12, 0x1),
618 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
619 UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
620 UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
621 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
622 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
623 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
624 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
628 static struct event_constraint snbep_uncore_r3qpi_constraints[] = {
629 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
630 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
631 UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
632 UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
633 UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
634 UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
635 UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
636 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
637 UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
638 UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
639 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
640 UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
641 UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
642 UNCORE_EVENT_CONSTRAINT(0x2a, 0x3),
643 UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
644 UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
645 UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
646 UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
647 UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
648 UNCORE_EVENT_CONSTRAINT(0x30, 0x3),
649 UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
650 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
651 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
652 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
653 UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
654 UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
655 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
656 UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
660 static struct intel_uncore_type snbep_uncore_ubox = {
665 .fixed_ctr_bits = 48,
666 .perf_ctr = SNBEP_U_MSR_PMON_CTR0,
667 .event_ctl = SNBEP_U_MSR_PMON_CTL0,
668 .event_mask = SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
669 .fixed_ctr = SNBEP_U_MSR_PMON_UCLK_FIXED_CTR,
670 .fixed_ctl = SNBEP_U_MSR_PMON_UCLK_FIXED_CTL,
671 .ops = &snbep_uncore_msr_ops,
672 .format_group = &snbep_uncore_ubox_format_group,
675 static struct extra_reg snbep_uncore_cbox_extra_regs[] = {
676 SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
677 SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
678 SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
679 SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0x6),
680 SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
681 SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0x6),
682 SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
683 SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0x6),
684 SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x6),
685 SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x8),
686 SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x8),
687 SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0xa),
688 SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0xa),
689 SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x2),
690 SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x2),
691 SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x2),
692 SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x2),
693 SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x8),
694 SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x8),
695 SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0xa),
696 SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0xa),
697 SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x2),
698 SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x2),
699 SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x2),
700 SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x2),
704 static void snbep_cbox_put_constraint(struct intel_uncore_box *box, struct perf_event *event)
706 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
707 struct intel_uncore_extra_reg *er = &box->shared_regs[0];
710 if (uncore_box_is_fake(box))
713 for (i = 0; i < 5; i++) {
714 if (reg1->alloc & (0x1 << i))
715 atomic_sub(1 << (i * 6), &er->ref);
720 static struct event_constraint *
721 __snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event,
722 u64 (*cbox_filter_mask)(int fields))
724 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
725 struct intel_uncore_extra_reg *er = &box->shared_regs[0];
730 if (reg1->idx == EXTRA_REG_NONE)
733 raw_spin_lock_irqsave(&er->lock, flags);
734 for (i = 0; i < 5; i++) {
735 if (!(reg1->idx & (0x1 << i)))
737 if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))
740 mask = cbox_filter_mask(0x1 << i);
741 if (!__BITS_VALUE(atomic_read(&er->ref), i, 6) ||
742 !((reg1->config ^ er->config) & mask)) {
743 atomic_add(1 << (i * 6), &er->ref);
745 er->config |= reg1->config & mask;
751 raw_spin_unlock_irqrestore(&er->lock, flags);
755 if (!uncore_box_is_fake(box))
756 reg1->alloc |= alloc;
760 for (; i >= 0; i--) {
761 if (alloc & (0x1 << i))
762 atomic_sub(1 << (i * 6), &er->ref);
764 return &uncore_constraint_empty;
767 static u64 snbep_cbox_filter_mask(int fields)
772 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_TID;
774 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_NID;
776 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE;
778 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC;
783 static struct event_constraint *
784 snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
786 return __snbep_cbox_get_constraint(box, event, snbep_cbox_filter_mask);
789 static int snbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
791 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
792 struct extra_reg *er;
795 for (er = snbep_uncore_cbox_extra_regs; er->msr; er++) {
796 if (er->event != (event->hw.config & er->config_mask))
802 reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER +
803 SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
804 reg1->config = event->attr.config1 & snbep_cbox_filter_mask(idx);
810 static struct intel_uncore_ops snbep_uncore_cbox_ops = {
811 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
812 .hw_config = snbep_cbox_hw_config,
813 .get_constraint = snbep_cbox_get_constraint,
814 .put_constraint = snbep_cbox_put_constraint,
817 static struct intel_uncore_type snbep_uncore_cbox = {
822 .event_ctl = SNBEP_C0_MSR_PMON_CTL0,
823 .perf_ctr = SNBEP_C0_MSR_PMON_CTR0,
824 .event_mask = SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
825 .box_ctl = SNBEP_C0_MSR_PMON_BOX_CTL,
826 .msr_offset = SNBEP_CBO_MSR_OFFSET,
827 .num_shared_regs = 1,
828 .constraints = snbep_uncore_cbox_constraints,
829 .ops = &snbep_uncore_cbox_ops,
830 .format_group = &snbep_uncore_cbox_format_group,
833 static u64 snbep_pcu_alter_er(struct perf_event *event, int new_idx, bool modify)
835 struct hw_perf_event *hwc = &event->hw;
836 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
837 u64 config = reg1->config;
839 if (new_idx > reg1->idx)
840 config <<= 8 * (new_idx - reg1->idx);
842 config >>= 8 * (reg1->idx - new_idx);
845 hwc->config += new_idx - reg1->idx;
846 reg1->config = config;
852 static struct event_constraint *
853 snbep_pcu_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
855 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
856 struct intel_uncore_extra_reg *er = &box->shared_regs[0];
859 u64 mask, config1 = reg1->config;
862 if (reg1->idx == EXTRA_REG_NONE ||
863 (!uncore_box_is_fake(box) && reg1->alloc))
866 mask = 0xffULL << (idx * 8);
867 raw_spin_lock_irqsave(&er->lock, flags);
868 if (!__BITS_VALUE(atomic_read(&er->ref), idx, 8) ||
869 !((config1 ^ er->config) & mask)) {
870 atomic_add(1 << (idx * 8), &er->ref);
872 er->config |= config1 & mask;
875 raw_spin_unlock_irqrestore(&er->lock, flags);
879 if (idx != reg1->idx) {
880 config1 = snbep_pcu_alter_er(event, idx, false);
883 return &uncore_constraint_empty;
886 if (!uncore_box_is_fake(box)) {
887 if (idx != reg1->idx)
888 snbep_pcu_alter_er(event, idx, true);
894 static void snbep_pcu_put_constraint(struct intel_uncore_box *box, struct perf_event *event)
896 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
897 struct intel_uncore_extra_reg *er = &box->shared_regs[0];
899 if (uncore_box_is_fake(box) || !reg1->alloc)
902 atomic_sub(1 << (reg1->idx * 8), &er->ref);
906 static int snbep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event)
908 struct hw_perf_event *hwc = &event->hw;
909 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
910 int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;
912 if (ev_sel >= 0xb && ev_sel <= 0xe) {
913 reg1->reg = SNBEP_PCU_MSR_PMON_BOX_FILTER;
914 reg1->idx = ev_sel - 0xb;
915 reg1->config = event->attr.config1 & (0xff << (reg1->idx * 8));
920 static struct intel_uncore_ops snbep_uncore_pcu_ops = {
921 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
922 .hw_config = snbep_pcu_hw_config,
923 .get_constraint = snbep_pcu_get_constraint,
924 .put_constraint = snbep_pcu_put_constraint,
927 static struct intel_uncore_type snbep_uncore_pcu = {
932 .perf_ctr = SNBEP_PCU_MSR_PMON_CTR0,
933 .event_ctl = SNBEP_PCU_MSR_PMON_CTL0,
934 .event_mask = SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
935 .box_ctl = SNBEP_PCU_MSR_PMON_BOX_CTL,
936 .num_shared_regs = 1,
937 .ops = &snbep_uncore_pcu_ops,
938 .format_group = &snbep_uncore_pcu_format_group,
941 static struct intel_uncore_type *snbep_msr_uncores[] = {
948 void snbep_uncore_cpu_init(void)
950 if (snbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
951 snbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
952 uncore_msr_uncores = snbep_msr_uncores;
/* Indices of the QPI port filter devices in uncore_extra_pci_dev[]. */
enum {
	SNBEP_PCI_QPI_PORT0_FILTER,
	SNBEP_PCI_QPI_PORT1_FILTER,
};
961 static int snbep_qpi_hw_config(struct intel_uncore_box *box, struct perf_event *event)
963 struct hw_perf_event *hwc = &event->hw;
964 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
965 struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
967 if ((hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK) == 0x38) {
969 reg1->reg = SNBEP_Q_Py_PCI_PMON_PKT_MATCH0;
970 reg1->config = event->attr.config1;
971 reg2->reg = SNBEP_Q_Py_PCI_PMON_PKT_MASK0;
972 reg2->config = event->attr.config2;
977 static void snbep_qpi_enable_event(struct intel_uncore_box *box, struct perf_event *event)
979 struct pci_dev *pdev = box->pci_dev;
980 struct hw_perf_event *hwc = &event->hw;
981 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
982 struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
984 if (reg1->idx != EXTRA_REG_NONE) {
985 int idx = box->pmu->pmu_idx + SNBEP_PCI_QPI_PORT0_FILTER;
986 int pkg = topology_phys_to_logical_pkg(box->pci_phys_id);
987 struct pci_dev *filter_pdev = uncore_extra_pci_dev[pkg].dev[idx];
990 pci_write_config_dword(filter_pdev, reg1->reg,
992 pci_write_config_dword(filter_pdev, reg1->reg + 4,
993 (u32)(reg1->config >> 32));
994 pci_write_config_dword(filter_pdev, reg2->reg,
996 pci_write_config_dword(filter_pdev, reg2->reg + 4,
997 (u32)(reg2->config >> 32));
1001 pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
1004 static struct intel_uncore_ops snbep_uncore_qpi_ops = {
1005 SNBEP_UNCORE_PCI_OPS_COMMON_INIT(),
1006 .enable_event = snbep_qpi_enable_event,
1007 .hw_config = snbep_qpi_hw_config,
1008 .get_constraint = uncore_get_constraint,
1009 .put_constraint = uncore_put_constraint,
1012 #define SNBEP_UNCORE_PCI_COMMON_INIT() \
1013 .perf_ctr = SNBEP_PCI_PMON_CTR0, \
1014 .event_ctl = SNBEP_PCI_PMON_CTL0, \
1015 .event_mask = SNBEP_PMON_RAW_EVENT_MASK, \
1016 .box_ctl = SNBEP_PCI_PMON_BOX_CTL, \
1017 .ops = &snbep_uncore_pci_ops, \
1018 .format_group = &snbep_uncore_format_group
1020 static struct intel_uncore_type snbep_uncore_ha = {
1024 .perf_ctr_bits = 48,
1025 SNBEP_UNCORE_PCI_COMMON_INIT(),
1028 static struct intel_uncore_type snbep_uncore_imc = {
1032 .perf_ctr_bits = 48,
1033 .fixed_ctr_bits = 48,
1034 .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
1035 .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
1036 .event_descs = snbep_uncore_imc_events,
1037 SNBEP_UNCORE_PCI_COMMON_INIT(),
1040 static struct intel_uncore_type snbep_uncore_qpi = {
1044 .perf_ctr_bits = 48,
1045 .perf_ctr = SNBEP_PCI_PMON_CTR0,
1046 .event_ctl = SNBEP_PCI_PMON_CTL0,
1047 .event_mask = SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
1048 .box_ctl = SNBEP_PCI_PMON_BOX_CTL,
1049 .num_shared_regs = 1,
1050 .ops = &snbep_uncore_qpi_ops,
1051 .event_descs = snbep_uncore_qpi_events,
1052 .format_group = &snbep_uncore_qpi_format_group,
1056 static struct intel_uncore_type snbep_uncore_r2pcie = {
1060 .perf_ctr_bits = 44,
1061 .constraints = snbep_uncore_r2pcie_constraints,
1062 SNBEP_UNCORE_PCI_COMMON_INIT(),
1065 static struct intel_uncore_type snbep_uncore_r3qpi = {
1069 .perf_ctr_bits = 44,
1070 .constraints = snbep_uncore_r3qpi_constraints,
1071 SNBEP_UNCORE_PCI_COMMON_INIT(),
1075 SNBEP_PCI_UNCORE_HA,
1076 SNBEP_PCI_UNCORE_IMC,
1077 SNBEP_PCI_UNCORE_QPI,
1078 SNBEP_PCI_UNCORE_R2PCIE,
1079 SNBEP_PCI_UNCORE_R3QPI,
1082 static struct intel_uncore_type *snbep_pci_uncores[] = {
1083 [SNBEP_PCI_UNCORE_HA] = &snbep_uncore_ha,
1084 [SNBEP_PCI_UNCORE_IMC] = &snbep_uncore_imc,
1085 [SNBEP_PCI_UNCORE_QPI] = &snbep_uncore_qpi,
1086 [SNBEP_PCI_UNCORE_R2PCIE] = &snbep_uncore_r2pcie,
1087 [SNBEP_PCI_UNCORE_R3QPI] = &snbep_uncore_r3qpi,
1091 static const struct pci_device_id snbep_uncore_pci_ids[] = {
1093 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_HA),
1094 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_HA, 0),
1096 { /* MC Channel 0 */
1097 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC0),
1098 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 0),
1100 { /* MC Channel 1 */
1101 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC1),
1102 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 1),
1104 { /* MC Channel 2 */
1105 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC2),
1106 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 2),
1108 { /* MC Channel 3 */
1109 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC3),
1110 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 3),
1113 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI0),
1114 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 0),
1117 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI1),
1118 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 1),
1121 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R2PCIE),
1122 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R2PCIE, 0),
1124 { /* R3QPI Link 0 */
1125 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI0),
1126 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 0),
1128 { /* R3QPI Link 1 */
1129 PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI1),
1130 .driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 1),
1132 { /* QPI Port 0 filter */
1133 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c86),
1134 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
1135 SNBEP_PCI_QPI_PORT0_FILTER),
1137 { /* QPI Port 0 filter */
1138 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c96),
1139 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
1140 SNBEP_PCI_QPI_PORT1_FILTER),
1142 { /* end: all zeroes */ }
1145 static struct pci_driver snbep_uncore_pci_driver = {
1146 .name = "snbep_uncore",
1147 .id_table = snbep_uncore_pci_ids,
1151 * build pci bus to socket mapping
1153 static int snbep_pci2phy_map_init(int devid)
1155 struct pci_dev *ubox_dev = NULL;
1156 int i, bus, nodeid, segment;
1157 struct pci2phy_map *map;
1162 /* find the UBOX device */
1163 ubox_dev = pci_get_device(PCI_VENDOR_ID_INTEL, devid, ubox_dev);
1166 bus = ubox_dev->bus->number;
1167 /* get the Node ID of the local register */
1168 err = pci_read_config_dword(ubox_dev, 0x40, &config);
1172 /* get the Node ID mapping */
1173 err = pci_read_config_dword(ubox_dev, 0x54, &config);
1177 segment = pci_domain_nr(ubox_dev->bus);
1178 raw_spin_lock(&pci2phy_map_lock);
1179 map = __find_pci2phy_map(segment);
1181 raw_spin_unlock(&pci2phy_map_lock);
1187 * every three bits in the Node ID mapping register maps
1188 * to a particular node.
1190 for (i = 0; i < 8; i++) {
1191 if (nodeid == ((config >> (3 * i)) & 0x7)) {
1192 map->pbus_to_physid[bus] = i;
1196 raw_spin_unlock(&pci2phy_map_lock);
1201 * For PCI bus with no UBOX device, find the next bus
1202 * that has UBOX device and use its mapping.
1204 raw_spin_lock(&pci2phy_map_lock);
1205 list_for_each_entry(map, &pci2phy_map_head, list) {
1207 for (bus = 255; bus >= 0; bus--) {
1208 if (map->pbus_to_physid[bus] >= 0)
1209 i = map->pbus_to_physid[bus];
1211 map->pbus_to_physid[bus] = i;
1214 raw_spin_unlock(&pci2phy_map_lock);
1217 pci_dev_put(ubox_dev);
1219 return err ? pcibios_err_to_errno(err) : 0;
1222 int snbep_uncore_pci_init(void)
1224 int ret = snbep_pci2phy_map_init(0x3ce0);
1227 uncore_pci_uncores = snbep_pci_uncores;
1228 uncore_pci_driver = &snbep_uncore_pci_driver;
1231 /* end of Sandy Bridge-EP uncore support */
1233 /* IvyTown uncore support */
1234 static void ivbep_uncore_msr_init_box(struct intel_uncore_box *box)
1236 unsigned msr = uncore_msr_box_ctl(box);
1238 wrmsrl(msr, IVBEP_PMON_BOX_CTL_INT);
1241 static void ivbep_uncore_pci_init_box(struct intel_uncore_box *box)
1243 struct pci_dev *pdev = box->pci_dev;
1245 pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, IVBEP_PMON_BOX_CTL_INT);
1248 #define IVBEP_UNCORE_MSR_OPS_COMMON_INIT() \
1249 .init_box = ivbep_uncore_msr_init_box, \
1250 .disable_box = snbep_uncore_msr_disable_box, \
1251 .enable_box = snbep_uncore_msr_enable_box, \
1252 .disable_event = snbep_uncore_msr_disable_event, \
1253 .enable_event = snbep_uncore_msr_enable_event, \
1254 .read_counter = uncore_msr_read_counter
1256 static struct intel_uncore_ops ivbep_uncore_msr_ops = {
1257 IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
1260 static struct intel_uncore_ops ivbep_uncore_pci_ops = {
1261 .init_box = ivbep_uncore_pci_init_box,
1262 .disable_box = snbep_uncore_pci_disable_box,
1263 .enable_box = snbep_uncore_pci_enable_box,
1264 .disable_event = snbep_uncore_pci_disable_event,
1265 .enable_event = snbep_uncore_pci_enable_event,
1266 .read_counter = snbep_uncore_pci_read_counter,
1269 #define IVBEP_UNCORE_PCI_COMMON_INIT() \
1270 .perf_ctr = SNBEP_PCI_PMON_CTR0, \
1271 .event_ctl = SNBEP_PCI_PMON_CTL0, \
1272 .event_mask = IVBEP_PMON_RAW_EVENT_MASK, \
1273 .box_ctl = SNBEP_PCI_PMON_BOX_CTL, \
1274 .ops = &ivbep_uncore_pci_ops, \
1275 .format_group = &ivbep_uncore_format_group
1277 static struct attribute *ivbep_uncore_formats_attr[] = {
1278 &format_attr_event.attr,
1279 &format_attr_umask.attr,
1280 &format_attr_edge.attr,
1281 &format_attr_inv.attr,
1282 &format_attr_thresh8.attr,
1286 static struct attribute *ivbep_uncore_ubox_formats_attr[] = {
1287 &format_attr_event.attr,
1288 &format_attr_umask.attr,
1289 &format_attr_edge.attr,
1290 &format_attr_inv.attr,
1291 &format_attr_thresh5.attr,
1295 static struct attribute *ivbep_uncore_cbox_formats_attr[] = {
1296 &format_attr_event.attr,
1297 &format_attr_umask.attr,
1298 &format_attr_edge.attr,
1299 &format_attr_tid_en.attr,
1300 &format_attr_thresh8.attr,
1301 &format_attr_filter_tid.attr,
1302 &format_attr_filter_link.attr,
1303 &format_attr_filter_state2.attr,
1304 &format_attr_filter_nid2.attr,
1305 &format_attr_filter_opc2.attr,
1306 &format_attr_filter_nc.attr,
1307 &format_attr_filter_c6.attr,
1308 &format_attr_filter_isoc.attr,
1312 static struct attribute *ivbep_uncore_pcu_formats_attr[] = {
1313 &format_attr_event.attr,
1314 &format_attr_occ_sel.attr,
1315 &format_attr_edge.attr,
1316 &format_attr_thresh5.attr,
1317 &format_attr_occ_invert.attr,
1318 &format_attr_occ_edge.attr,
1319 &format_attr_filter_band0.attr,
1320 &format_attr_filter_band1.attr,
1321 &format_attr_filter_band2.attr,
1322 &format_attr_filter_band3.attr,
1326 static struct attribute *ivbep_uncore_qpi_formats_attr[] = {
1327 &format_attr_event_ext.attr,
1328 &format_attr_umask.attr,
1329 &format_attr_edge.attr,
1330 &format_attr_thresh8.attr,
1331 &format_attr_match_rds.attr,
1332 &format_attr_match_rnid30.attr,
1333 &format_attr_match_rnid4.attr,
1334 &format_attr_match_dnid.attr,
1335 &format_attr_match_mc.attr,
1336 &format_attr_match_opc.attr,
1337 &format_attr_match_vnw.attr,
1338 &format_attr_match0.attr,
1339 &format_attr_match1.attr,
1340 &format_attr_mask_rds.attr,
1341 &format_attr_mask_rnid30.attr,
1342 &format_attr_mask_rnid4.attr,
1343 &format_attr_mask_dnid.attr,
1344 &format_attr_mask_mc.attr,
1345 &format_attr_mask_opc.attr,
1346 &format_attr_mask_vnw.attr,
1347 &format_attr_mask0.attr,
1348 &format_attr_mask1.attr,
1352 static struct attribute_group ivbep_uncore_format_group = {
1354 .attrs = ivbep_uncore_formats_attr,
1357 static struct attribute_group ivbep_uncore_ubox_format_group = {
1359 .attrs = ivbep_uncore_ubox_formats_attr,
1362 static struct attribute_group ivbep_uncore_cbox_format_group = {
1364 .attrs = ivbep_uncore_cbox_formats_attr,
1367 static struct attribute_group ivbep_uncore_pcu_format_group = {
1369 .attrs = ivbep_uncore_pcu_formats_attr,
1372 static struct attribute_group ivbep_uncore_qpi_format_group = {
1374 .attrs = ivbep_uncore_qpi_formats_attr,
1377 static struct intel_uncore_type ivbep_uncore_ubox = {
1381 .perf_ctr_bits = 44,
1382 .fixed_ctr_bits = 48,
1383 .perf_ctr = SNBEP_U_MSR_PMON_CTR0,
1384 .event_ctl = SNBEP_U_MSR_PMON_CTL0,
1385 .event_mask = IVBEP_U_MSR_PMON_RAW_EVENT_MASK,
1386 .fixed_ctr = SNBEP_U_MSR_PMON_UCLK_FIXED_CTR,
1387 .fixed_ctl = SNBEP_U_MSR_PMON_UCLK_FIXED_CTL,
1388 .ops = &ivbep_uncore_msr_ops,
1389 .format_group = &ivbep_uncore_ubox_format_group,
1392 static struct extra_reg ivbep_uncore_cbox_extra_regs[] = {
1393 SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
1394 SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
1395 SNBEP_CBO_EVENT_EXTRA_REG(0x1031, 0x10ff, 0x2),
1396 SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
1397 SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0xc),
1398 SNBEP_CBO_EVENT_EXTRA_REG(0x5134, 0xffff, 0xc),
1399 SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
1400 SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0xc),
1401 SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
1402 SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0xc),
1403 SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
1404 SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0xc),
1405 SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x10),
1406 SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
1407 SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
1408 SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
1409 SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
1410 SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
1411 SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
1412 SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
1413 SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
1414 SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
1415 SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
1416 SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
1417 SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
1418 SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
1419 SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
1420 SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
1421 SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
1422 SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
1423 SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
1424 SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
1425 SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
1426 SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
1427 SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
1428 SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
1429 SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
1433 static u64 ivbep_cbox_filter_mask(int fields)
1438 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_TID;
1440 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK;
1442 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE;
1444 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NID;
1445 if (fields & 0x10) {
1446 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC;
1447 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NC;
1448 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_C6;
1449 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC;
1455 static struct event_constraint *
1456 ivbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
1458 return __snbep_cbox_get_constraint(box, event, ivbep_cbox_filter_mask);
1461 static int ivbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
1463 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
1464 struct extra_reg *er;
1467 for (er = ivbep_uncore_cbox_extra_regs; er->msr; er++) {
1468 if (er->event != (event->hw.config & er->config_mask))
1474 reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER +
1475 SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
1476 reg1->config = event->attr.config1 & ivbep_cbox_filter_mask(idx);
1482 static void ivbep_cbox_enable_event(struct intel_uncore_box *box, struct perf_event *event)
1484 struct hw_perf_event *hwc = &event->hw;
1485 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
1487 if (reg1->idx != EXTRA_REG_NONE) {
1488 u64 filter = uncore_shared_reg_config(box, 0);
1489 wrmsrl(reg1->reg, filter & 0xffffffff);
1490 wrmsrl(reg1->reg + 6, filter >> 32);
1493 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
1496 static struct intel_uncore_ops ivbep_uncore_cbox_ops = {
1497 .init_box = ivbep_uncore_msr_init_box,
1498 .disable_box = snbep_uncore_msr_disable_box,
1499 .enable_box = snbep_uncore_msr_enable_box,
1500 .disable_event = snbep_uncore_msr_disable_event,
1501 .enable_event = ivbep_cbox_enable_event,
1502 .read_counter = uncore_msr_read_counter,
1503 .hw_config = ivbep_cbox_hw_config,
1504 .get_constraint = ivbep_cbox_get_constraint,
1505 .put_constraint = snbep_cbox_put_constraint,
1508 static struct intel_uncore_type ivbep_uncore_cbox = {
1512 .perf_ctr_bits = 44,
1513 .event_ctl = SNBEP_C0_MSR_PMON_CTL0,
1514 .perf_ctr = SNBEP_C0_MSR_PMON_CTR0,
1515 .event_mask = IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
1516 .box_ctl = SNBEP_C0_MSR_PMON_BOX_CTL,
1517 .msr_offset = SNBEP_CBO_MSR_OFFSET,
1518 .num_shared_regs = 1,
1519 .constraints = snbep_uncore_cbox_constraints,
1520 .ops = &ivbep_uncore_cbox_ops,
1521 .format_group = &ivbep_uncore_cbox_format_group,
1524 static struct intel_uncore_ops ivbep_uncore_pcu_ops = {
1525 IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
1526 .hw_config = snbep_pcu_hw_config,
1527 .get_constraint = snbep_pcu_get_constraint,
1528 .put_constraint = snbep_pcu_put_constraint,
1531 static struct intel_uncore_type ivbep_uncore_pcu = {
1535 .perf_ctr_bits = 48,
1536 .perf_ctr = SNBEP_PCU_MSR_PMON_CTR0,
1537 .event_ctl = SNBEP_PCU_MSR_PMON_CTL0,
1538 .event_mask = IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
1539 .box_ctl = SNBEP_PCU_MSR_PMON_BOX_CTL,
1540 .num_shared_regs = 1,
1541 .ops = &ivbep_uncore_pcu_ops,
1542 .format_group = &ivbep_uncore_pcu_format_group,
1545 static struct intel_uncore_type *ivbep_msr_uncores[] = {
1552 void ivbep_uncore_cpu_init(void)
1554 if (ivbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
1555 ivbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
1556 uncore_msr_uncores = ivbep_msr_uncores;
1559 static struct intel_uncore_type ivbep_uncore_ha = {
1563 .perf_ctr_bits = 48,
1564 IVBEP_UNCORE_PCI_COMMON_INIT(),
1567 static struct intel_uncore_type ivbep_uncore_imc = {
1571 .perf_ctr_bits = 48,
1572 .fixed_ctr_bits = 48,
1573 .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
1574 .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
1575 .event_descs = snbep_uncore_imc_events,
1576 IVBEP_UNCORE_PCI_COMMON_INIT(),
1579 /* registers in IRP boxes are not properly aligned */
1580 static unsigned ivbep_uncore_irp_ctls[] = {0xd8, 0xdc, 0xe0, 0xe4};
1581 static unsigned ivbep_uncore_irp_ctrs[] = {0xa0, 0xb0, 0xb8, 0xc0};
1583 static void ivbep_uncore_irp_enable_event(struct intel_uncore_box *box, struct perf_event *event)
1585 struct pci_dev *pdev = box->pci_dev;
1586 struct hw_perf_event *hwc = &event->hw;
1588 pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx],
1589 hwc->config | SNBEP_PMON_CTL_EN);
1592 static void ivbep_uncore_irp_disable_event(struct intel_uncore_box *box, struct perf_event *event)
1594 struct pci_dev *pdev = box->pci_dev;
1595 struct hw_perf_event *hwc = &event->hw;
1597 pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], hwc->config);
1600 static u64 ivbep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event)
1602 struct pci_dev *pdev = box->pci_dev;
1603 struct hw_perf_event *hwc = &event->hw;
1606 pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx], (u32 *)&count);
1607 pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1);
1612 static struct intel_uncore_ops ivbep_uncore_irp_ops = {
1613 .init_box = ivbep_uncore_pci_init_box,
1614 .disable_box = snbep_uncore_pci_disable_box,
1615 .enable_box = snbep_uncore_pci_enable_box,
1616 .disable_event = ivbep_uncore_irp_disable_event,
1617 .enable_event = ivbep_uncore_irp_enable_event,
1618 .read_counter = ivbep_uncore_irp_read_counter,
1621 static struct intel_uncore_type ivbep_uncore_irp = {
1625 .perf_ctr_bits = 48,
1626 .event_mask = IVBEP_PMON_RAW_EVENT_MASK,
1627 .box_ctl = SNBEP_PCI_PMON_BOX_CTL,
1628 .ops = &ivbep_uncore_irp_ops,
1629 .format_group = &ivbep_uncore_format_group,
1632 static struct intel_uncore_ops ivbep_uncore_qpi_ops = {
1633 .init_box = ivbep_uncore_pci_init_box,
1634 .disable_box = snbep_uncore_pci_disable_box,
1635 .enable_box = snbep_uncore_pci_enable_box,
1636 .disable_event = snbep_uncore_pci_disable_event,
1637 .enable_event = snbep_qpi_enable_event,
1638 .read_counter = snbep_uncore_pci_read_counter,
1639 .hw_config = snbep_qpi_hw_config,
1640 .get_constraint = uncore_get_constraint,
1641 .put_constraint = uncore_put_constraint,
1644 static struct intel_uncore_type ivbep_uncore_qpi = {
1648 .perf_ctr_bits = 48,
1649 .perf_ctr = SNBEP_PCI_PMON_CTR0,
1650 .event_ctl = SNBEP_PCI_PMON_CTL0,
1651 .event_mask = IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
1652 .box_ctl = SNBEP_PCI_PMON_BOX_CTL,
1653 .num_shared_regs = 1,
1654 .ops = &ivbep_uncore_qpi_ops,
1655 .format_group = &ivbep_uncore_qpi_format_group,
1658 static struct intel_uncore_type ivbep_uncore_r2pcie = {
1662 .perf_ctr_bits = 44,
1663 .constraints = snbep_uncore_r2pcie_constraints,
1664 IVBEP_UNCORE_PCI_COMMON_INIT(),
1667 static struct intel_uncore_type ivbep_uncore_r3qpi = {
1671 .perf_ctr_bits = 44,
1672 .constraints = snbep_uncore_r3qpi_constraints,
1673 IVBEP_UNCORE_PCI_COMMON_INIT(),
1677 IVBEP_PCI_UNCORE_HA,
1678 IVBEP_PCI_UNCORE_IMC,
1679 IVBEP_PCI_UNCORE_IRP,
1680 IVBEP_PCI_UNCORE_QPI,
1681 IVBEP_PCI_UNCORE_R2PCIE,
1682 IVBEP_PCI_UNCORE_R3QPI,
1685 static struct intel_uncore_type *ivbep_pci_uncores[] = {
1686 [IVBEP_PCI_UNCORE_HA] = &ivbep_uncore_ha,
1687 [IVBEP_PCI_UNCORE_IMC] = &ivbep_uncore_imc,
1688 [IVBEP_PCI_UNCORE_IRP] = &ivbep_uncore_irp,
1689 [IVBEP_PCI_UNCORE_QPI] = &ivbep_uncore_qpi,
1690 [IVBEP_PCI_UNCORE_R2PCIE] = &ivbep_uncore_r2pcie,
1691 [IVBEP_PCI_UNCORE_R3QPI] = &ivbep_uncore_r3qpi,
1695 static const struct pci_device_id ivbep_uncore_pci_ids[] = {
1696 { /* Home Agent 0 */
1697 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe30),
1698 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 0),
1700 { /* Home Agent 1 */
1701 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe38),
1702 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 1),
1704 { /* MC0 Channel 0 */
1705 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb4),
1706 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 0),
1708 { /* MC0 Channel 1 */
1709 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb5),
1710 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 1),
1712 { /* MC0 Channel 3 */
1713 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb0),
1714 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 2),
1716 { /* MC0 Channel 4 */
1717 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb1),
1718 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 3),
1720 { /* MC1 Channel 0 */
1721 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef4),
1722 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 4),
1724 { /* MC1 Channel 1 */
1725 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef5),
1726 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 5),
1728 { /* MC1 Channel 3 */
1729 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef0),
1730 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 6),
1732 { /* MC1 Channel 4 */
1733 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef1),
1734 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 7),
1737 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe39),
1738 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IRP, 0),
1741 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe32),
1742 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 0),
1745 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe33),
1746 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 1),
1749 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3a),
1750 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 2),
1753 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe34),
1754 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R2PCIE, 0),
1756 { /* R3QPI0 Link 0 */
1757 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe36),
1758 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 0),
1760 { /* R3QPI0 Link 1 */
1761 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe37),
1762 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 1),
1764 { /* R3QPI1 Link 2 */
1765 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3e),
1766 .driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 2),
1768 { /* QPI Port 0 filter */
1769 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe86),
1770 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
1771 SNBEP_PCI_QPI_PORT0_FILTER),
1773 { /* QPI Port 0 filter */
1774 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe96),
1775 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
1776 SNBEP_PCI_QPI_PORT1_FILTER),
1778 { /* end: all zeroes */ }
1781 static struct pci_driver ivbep_uncore_pci_driver = {
1782 .name = "ivbep_uncore",
1783 .id_table = ivbep_uncore_pci_ids,
1786 int ivbep_uncore_pci_init(void)
1788 int ret = snbep_pci2phy_map_init(0x0e1e);
1791 uncore_pci_uncores = ivbep_pci_uncores;
1792 uncore_pci_driver = &ivbep_uncore_pci_driver;
1795 /* end of IvyTown uncore support */
1797 /* KNL uncore support */
1798 static struct attribute *knl_uncore_ubox_formats_attr[] = {
1799 &format_attr_event.attr,
1800 &format_attr_umask.attr,
1801 &format_attr_edge.attr,
1802 &format_attr_tid_en.attr,
1803 &format_attr_inv.attr,
1804 &format_attr_thresh5.attr,
1808 static struct attribute_group knl_uncore_ubox_format_group = {
1810 .attrs = knl_uncore_ubox_formats_attr,
1813 static struct intel_uncore_type knl_uncore_ubox = {
1817 .perf_ctr_bits = 48,
1818 .fixed_ctr_bits = 48,
1819 .perf_ctr = HSWEP_U_MSR_PMON_CTR0,
1820 .event_ctl = HSWEP_U_MSR_PMON_CTL0,
1821 .event_mask = KNL_U_MSR_PMON_RAW_EVENT_MASK,
1822 .fixed_ctr = HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
1823 .fixed_ctl = HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
1824 .ops = &snbep_uncore_msr_ops,
1825 .format_group = &knl_uncore_ubox_format_group,
1828 static struct attribute *knl_uncore_cha_formats_attr[] = {
1829 &format_attr_event.attr,
1830 &format_attr_umask.attr,
1831 &format_attr_qor.attr,
1832 &format_attr_edge.attr,
1833 &format_attr_tid_en.attr,
1834 &format_attr_inv.attr,
1835 &format_attr_thresh8.attr,
1836 &format_attr_filter_tid4.attr,
1837 &format_attr_filter_link3.attr,
1838 &format_attr_filter_state4.attr,
1839 &format_attr_filter_local.attr,
1840 &format_attr_filter_all_op.attr,
1841 &format_attr_filter_nnm.attr,
1842 &format_attr_filter_opc3.attr,
1843 &format_attr_filter_nc.attr,
1844 &format_attr_filter_isoc.attr,
1848 static struct attribute_group knl_uncore_cha_format_group = {
1850 .attrs = knl_uncore_cha_formats_attr,
1853 static struct event_constraint knl_uncore_cha_constraints[] = {
1854 UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
1855 UNCORE_EVENT_CONSTRAINT(0x1f, 0x1),
1856 UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
1857 EVENT_CONSTRAINT_END
1860 static struct extra_reg knl_uncore_cha_extra_regs[] = {
1861 SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
1862 SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
1863 SNBEP_CBO_EVENT_EXTRA_REG(0x3d, 0xff, 0x2),
1864 SNBEP_CBO_EVENT_EXTRA_REG(0x35, 0xff, 0x4),
1865 SNBEP_CBO_EVENT_EXTRA_REG(0x36, 0xff, 0x4),
1869 static u64 knl_cha_filter_mask(int fields)
1874 mask |= KNL_CHA_MSR_PMON_BOX_FILTER_TID;
1876 mask |= KNL_CHA_MSR_PMON_BOX_FILTER_STATE;
1878 mask |= KNL_CHA_MSR_PMON_BOX_FILTER_OP;
1882 static struct event_constraint *
1883 knl_cha_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
1885 return __snbep_cbox_get_constraint(box, event, knl_cha_filter_mask);
1888 static int knl_cha_hw_config(struct intel_uncore_box *box,
1889 struct perf_event *event)
1891 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
1892 struct extra_reg *er;
1895 for (er = knl_uncore_cha_extra_regs; er->msr; er++) {
1896 if (er->event != (event->hw.config & er->config_mask))
1902 reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 +
1903 KNL_CHA_MSR_OFFSET * box->pmu->pmu_idx;
1904 reg1->config = event->attr.config1 & knl_cha_filter_mask(idx);
1910 static void hswep_cbox_enable_event(struct intel_uncore_box *box,
1911 struct perf_event *event);
1913 static struct intel_uncore_ops knl_uncore_cha_ops = {
1914 .init_box = snbep_uncore_msr_init_box,
1915 .disable_box = snbep_uncore_msr_disable_box,
1916 .enable_box = snbep_uncore_msr_enable_box,
1917 .disable_event = snbep_uncore_msr_disable_event,
1918 .enable_event = hswep_cbox_enable_event,
1919 .read_counter = uncore_msr_read_counter,
1920 .hw_config = knl_cha_hw_config,
1921 .get_constraint = knl_cha_get_constraint,
1922 .put_constraint = snbep_cbox_put_constraint,
1925 static struct intel_uncore_type knl_uncore_cha = {
1929 .perf_ctr_bits = 48,
1930 .event_ctl = HSWEP_C0_MSR_PMON_CTL0,
1931 .perf_ctr = HSWEP_C0_MSR_PMON_CTR0,
1932 .event_mask = KNL_CHA_MSR_PMON_RAW_EVENT_MASK,
1933 .box_ctl = HSWEP_C0_MSR_PMON_BOX_CTL,
1934 .msr_offset = KNL_CHA_MSR_OFFSET,
1935 .num_shared_regs = 1,
1936 .constraints = knl_uncore_cha_constraints,
1937 .ops = &knl_uncore_cha_ops,
1938 .format_group = &knl_uncore_cha_format_group,
1941 static struct attribute *knl_uncore_pcu_formats_attr[] = {
1942 &format_attr_event2.attr,
1943 &format_attr_use_occ_ctr.attr,
1944 &format_attr_occ_sel.attr,
1945 &format_attr_edge.attr,
1946 &format_attr_tid_en.attr,
1947 &format_attr_inv.attr,
1948 &format_attr_thresh6.attr,
1949 &format_attr_occ_invert.attr,
1950 &format_attr_occ_edge_det.attr,
1954 static struct attribute_group knl_uncore_pcu_format_group = {
1956 .attrs = knl_uncore_pcu_formats_attr,
1959 static struct intel_uncore_type knl_uncore_pcu = {
1963 .perf_ctr_bits = 48,
1964 .perf_ctr = HSWEP_PCU_MSR_PMON_CTR0,
1965 .event_ctl = HSWEP_PCU_MSR_PMON_CTL0,
1966 .event_mask = KNL_PCU_MSR_PMON_RAW_EVENT_MASK,
1967 .box_ctl = HSWEP_PCU_MSR_PMON_BOX_CTL,
1968 .ops = &snbep_uncore_msr_ops,
1969 .format_group = &knl_uncore_pcu_format_group,
1972 static struct intel_uncore_type *knl_msr_uncores[] = {
1979 void knl_uncore_cpu_init(void)
1981 uncore_msr_uncores = knl_msr_uncores;
1984 static void knl_uncore_imc_enable_box(struct intel_uncore_box *box)
1986 struct pci_dev *pdev = box->pci_dev;
1987 int box_ctl = uncore_pci_box_ctl(box);
1989 pci_write_config_dword(pdev, box_ctl, 0);
1992 static void knl_uncore_imc_enable_event(struct intel_uncore_box *box,
1993 struct perf_event *event)
1995 struct pci_dev *pdev = box->pci_dev;
1996 struct hw_perf_event *hwc = &event->hw;
1998 if ((event->attr.config & SNBEP_PMON_CTL_EV_SEL_MASK)
1999 == UNCORE_FIXED_EVENT)
2000 pci_write_config_dword(pdev, hwc->config_base,
2001 hwc->config | KNL_PMON_FIXED_CTL_EN);
2003 pci_write_config_dword(pdev, hwc->config_base,
2004 hwc->config | SNBEP_PMON_CTL_EN);
2007 static struct intel_uncore_ops knl_uncore_imc_ops = {
2008 .init_box = snbep_uncore_pci_init_box,
2009 .disable_box = snbep_uncore_pci_disable_box,
2010 .enable_box = knl_uncore_imc_enable_box,
2011 .read_counter = snbep_uncore_pci_read_counter,
2012 .enable_event = knl_uncore_imc_enable_event,
2013 .disable_event = snbep_uncore_pci_disable_event,
2016 static struct intel_uncore_type knl_uncore_imc_uclk = {
2020 .perf_ctr_bits = 48,
2021 .fixed_ctr_bits = 48,
2022 .perf_ctr = KNL_UCLK_MSR_PMON_CTR0_LOW,
2023 .event_ctl = KNL_UCLK_MSR_PMON_CTL0,
2024 .event_mask = SNBEP_PMON_RAW_EVENT_MASK,
2025 .fixed_ctr = KNL_UCLK_MSR_PMON_UCLK_FIXED_LOW,
2026 .fixed_ctl = KNL_UCLK_MSR_PMON_UCLK_FIXED_CTL,
2027 .box_ctl = KNL_UCLK_MSR_PMON_BOX_CTL,
2028 .ops = &knl_uncore_imc_ops,
2029 .format_group = &snbep_uncore_format_group,
2032 static struct intel_uncore_type knl_uncore_imc_dclk = {
2036 .perf_ctr_bits = 48,
2037 .fixed_ctr_bits = 48,
2038 .perf_ctr = KNL_MC0_CH0_MSR_PMON_CTR0_LOW,
2039 .event_ctl = KNL_MC0_CH0_MSR_PMON_CTL0,
2040 .event_mask = SNBEP_PMON_RAW_EVENT_MASK,
2041 .fixed_ctr = KNL_MC0_CH0_MSR_PMON_FIXED_LOW,
2042 .fixed_ctl = KNL_MC0_CH0_MSR_PMON_FIXED_CTL,
2043 .box_ctl = KNL_MC0_CH0_MSR_PMON_BOX_CTL,
2044 .ops = &knl_uncore_imc_ops,
2045 .format_group = &snbep_uncore_format_group,
2048 static struct intel_uncore_type knl_uncore_edc_uclk = {
2052 .perf_ctr_bits = 48,
2053 .fixed_ctr_bits = 48,
2054 .perf_ctr = KNL_UCLK_MSR_PMON_CTR0_LOW,
2055 .event_ctl = KNL_UCLK_MSR_PMON_CTL0,
2056 .event_mask = SNBEP_PMON_RAW_EVENT_MASK,
2057 .fixed_ctr = KNL_UCLK_MSR_PMON_UCLK_FIXED_LOW,
2058 .fixed_ctl = KNL_UCLK_MSR_PMON_UCLK_FIXED_CTL,
2059 .box_ctl = KNL_UCLK_MSR_PMON_BOX_CTL,
2060 .ops = &knl_uncore_imc_ops,
2061 .format_group = &snbep_uncore_format_group,
2064 static struct intel_uncore_type knl_uncore_edc_eclk = {
2068 .perf_ctr_bits = 48,
2069 .fixed_ctr_bits = 48,
2070 .perf_ctr = KNL_EDC0_ECLK_MSR_PMON_CTR0_LOW,
2071 .event_ctl = KNL_EDC0_ECLK_MSR_PMON_CTL0,
2072 .event_mask = SNBEP_PMON_RAW_EVENT_MASK,
2073 .fixed_ctr = KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_LOW,
2074 .fixed_ctl = KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_CTL,
2075 .box_ctl = KNL_EDC0_ECLK_MSR_PMON_BOX_CTL,
2076 .ops = &knl_uncore_imc_ops,
2077 .format_group = &snbep_uncore_format_group,
2080 static struct event_constraint knl_uncore_m2pcie_constraints[] = {
2081 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
2082 EVENT_CONSTRAINT_END
2085 static struct intel_uncore_type knl_uncore_m2pcie = {
2089 .perf_ctr_bits = 48,
2090 .constraints = knl_uncore_m2pcie_constraints,
2091 SNBEP_UNCORE_PCI_COMMON_INIT(),
2094 static struct attribute *knl_uncore_irp_formats_attr[] = {
2095 &format_attr_event.attr,
2096 &format_attr_umask.attr,
2097 &format_attr_qor.attr,
2098 &format_attr_edge.attr,
2099 &format_attr_inv.attr,
2100 &format_attr_thresh8.attr,
2104 static struct attribute_group knl_uncore_irp_format_group = {
2106 .attrs = knl_uncore_irp_formats_attr,
2109 static struct intel_uncore_type knl_uncore_irp = {
2113 .perf_ctr_bits = 48,
2114 .perf_ctr = SNBEP_PCI_PMON_CTR0,
2115 .event_ctl = SNBEP_PCI_PMON_CTL0,
2116 .event_mask = KNL_IRP_PCI_PMON_RAW_EVENT_MASK,
2117 .box_ctl = KNL_IRP_PCI_PMON_BOX_CTL,
2118 .ops = &snbep_uncore_pci_ops,
2119 .format_group = &knl_uncore_irp_format_group,
2123 KNL_PCI_UNCORE_MC_UCLK,
2124 KNL_PCI_UNCORE_MC_DCLK,
2125 KNL_PCI_UNCORE_EDC_UCLK,
2126 KNL_PCI_UNCORE_EDC_ECLK,
2127 KNL_PCI_UNCORE_M2PCIE,
2131 static struct intel_uncore_type *knl_pci_uncores[] = {
2132 [KNL_PCI_UNCORE_MC_UCLK] = &knl_uncore_imc_uclk,
2133 [KNL_PCI_UNCORE_MC_DCLK] = &knl_uncore_imc_dclk,
2134 [KNL_PCI_UNCORE_EDC_UCLK] = &knl_uncore_edc_uclk,
2135 [KNL_PCI_UNCORE_EDC_ECLK] = &knl_uncore_edc_eclk,
2136 [KNL_PCI_UNCORE_M2PCIE] = &knl_uncore_m2pcie,
2137 [KNL_PCI_UNCORE_IRP] = &knl_uncore_irp,
2142 * KNL uses a common PCI device ID for multiple instances of an Uncore PMU
2143 * device type. prior to KNL, each instance of a PMU device type had a unique
2146 * PCI Device ID Uncore PMU Devices
2147 * ----------------------------------
2148 * 0x7841 MC0 UClk, MC1 UClk
2149 * 0x7843 MC0 DClk CH 0, MC0 DClk CH 1, MC0 DClk CH 2,
2150 * MC1 DClk CH 0, MC1 DClk CH 1, MC1 DClk CH 2
2151 * 0x7833 EDC0 UClk, EDC1 UClk, EDC2 UClk, EDC3 UClk,
2152 * EDC4 UClk, EDC5 UClk, EDC6 UClk, EDC7 UClk
2153 * 0x7835 EDC0 EClk, EDC1 EClk, EDC2 EClk, EDC3 EClk,
2154 * EDC4 EClk, EDC5 EClk, EDC6 EClk, EDC7 EClk
2159 static const struct pci_device_id knl_uncore_pci_ids[] = {
2161 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7841),
2162 .driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_MC_UCLK, 0),
2164 { /* MC DClk Channel */
2165 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7843),
2166 .driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_MC_DCLK, 0),
2169 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
2170 .driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_EDC_UCLK, 0),
2173 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
2174 .driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_EDC_ECLK, 0),
2177 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7817),
2178 .driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_M2PCIE, 0),
2181 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7814),
2182 .driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_IRP, 0),
2184 { /* end: all zeroes */ }
2187 static struct pci_driver knl_uncore_pci_driver = {
2188 .name = "knl_uncore",
2189 .id_table = knl_uncore_pci_ids,
2192 int knl_uncore_pci_init(void)
2196 /* All KNL PCI based PMON units are on the same PCI bus except IRP */
2197 ret = snb_pci2phy_map_init(0x7814); /* IRP */
2200 ret = snb_pci2phy_map_init(0x7817); /* M2PCIe */
2203 uncore_pci_uncores = knl_pci_uncores;
2204 uncore_pci_driver = &knl_uncore_pci_driver;
2208 /* end of KNL uncore support */
2210 /* Haswell-EP uncore support */
2211 static struct attribute *hswep_uncore_ubox_formats_attr[] = {
2212 &format_attr_event.attr,
2213 &format_attr_umask.attr,
2214 &format_attr_edge.attr,
2215 &format_attr_inv.attr,
2216 &format_attr_thresh5.attr,
2217 &format_attr_filter_tid2.attr,
2218 &format_attr_filter_cid.attr,
2222 static struct attribute_group hswep_uncore_ubox_format_group = {
2224 .attrs = hswep_uncore_ubox_formats_attr,
2227 static int hswep_ubox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
2229 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
2230 reg1->reg = HSWEP_U_MSR_PMON_FILTER;
2231 reg1->config = event->attr.config1 & HSWEP_U_MSR_PMON_BOX_FILTER_MASK;
2236 static struct intel_uncore_ops hswep_uncore_ubox_ops = {
2237 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
2238 .hw_config = hswep_ubox_hw_config,
2239 .get_constraint = uncore_get_constraint,
2240 .put_constraint = uncore_put_constraint,
2243 static struct intel_uncore_type hswep_uncore_ubox = {
2247 .perf_ctr_bits = 44,
2248 .fixed_ctr_bits = 48,
2249 .perf_ctr = HSWEP_U_MSR_PMON_CTR0,
2250 .event_ctl = HSWEP_U_MSR_PMON_CTL0,
2251 .event_mask = SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
2252 .fixed_ctr = HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
2253 .fixed_ctl = HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
2254 .num_shared_regs = 1,
2255 .ops = &hswep_uncore_ubox_ops,
2256 .format_group = &hswep_uncore_ubox_format_group,
2259 static struct attribute *hswep_uncore_cbox_formats_attr[] = {
2260 &format_attr_event.attr,
2261 &format_attr_umask.attr,
2262 &format_attr_edge.attr,
2263 &format_attr_tid_en.attr,
2264 &format_attr_thresh8.attr,
2265 &format_attr_filter_tid3.attr,
2266 &format_attr_filter_link2.attr,
2267 &format_attr_filter_state3.attr,
2268 &format_attr_filter_nid2.attr,
2269 &format_attr_filter_opc2.attr,
2270 &format_attr_filter_nc.attr,
2271 &format_attr_filter_c6.attr,
2272 &format_attr_filter_isoc.attr,
2276 static struct attribute_group hswep_uncore_cbox_format_group = {
2278 .attrs = hswep_uncore_cbox_formats_attr,
2281 static struct event_constraint hswep_uncore_cbox_constraints[] = {
2282 UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
2283 UNCORE_EVENT_CONSTRAINT(0x09, 0x1),
2284 UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
2285 UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
2286 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
2287 UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
2288 UNCORE_EVENT_CONSTRAINT(0x3e, 0x1),
2289 EVENT_CONSTRAINT_END
2292 static struct extra_reg hswep_uncore_cbox_extra_regs[] = {
2293 SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
2294 SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
2295 SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
2296 SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
2297 SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
2298 SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
2299 SNBEP_CBO_EVENT_EXTRA_REG(0x2134, 0xffff, 0x4),
2300 SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x4),
2301 SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
2302 SNBEP_CBO_EVENT_EXTRA_REG(0x4028, 0x40ff, 0x8),
2303 SNBEP_CBO_EVENT_EXTRA_REG(0x4032, 0x40ff, 0x8),
2304 SNBEP_CBO_EVENT_EXTRA_REG(0x4029, 0x40ff, 0x8),
2305 SNBEP_CBO_EVENT_EXTRA_REG(0x4033, 0x40ff, 0x8),
2306 SNBEP_CBO_EVENT_EXTRA_REG(0x402A, 0x40ff, 0x8),
2307 SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x12),
2308 SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
2309 SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
2310 SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
2311 SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
2312 SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
2313 SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
2314 SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
2315 SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
2316 SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
2317 SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
2318 SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
2319 SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
2320 SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
2321 SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
2322 SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
2323 SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
2324 SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
2325 SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
2326 SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
2327 SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
2328 SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
2329 SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
2330 SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
2334 static u64 hswep_cbox_filter_mask(int fields)
2338 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_TID;
2340 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK;
2342 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE;
2344 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NID;
2345 if (fields & 0x10) {
2346 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC;
2347 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NC;
2348 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_C6;
2349 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC;
2354 static struct event_constraint *
2355 hswep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
2357 return __snbep_cbox_get_constraint(box, event, hswep_cbox_filter_mask);
2360 static int hswep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
2362 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
2363 struct extra_reg *er;
2366 for (er = hswep_uncore_cbox_extra_regs; er->msr; er++) {
2367 if (er->event != (event->hw.config & er->config_mask))
2373 reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 +
2374 HSWEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
2375 reg1->config = event->attr.config1 & hswep_cbox_filter_mask(idx);
2381 static void hswep_cbox_enable_event(struct intel_uncore_box *box,
2382 struct perf_event *event)
2384 struct hw_perf_event *hwc = &event->hw;
2385 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
2387 if (reg1->idx != EXTRA_REG_NONE) {
2388 u64 filter = uncore_shared_reg_config(box, 0);
2389 wrmsrl(reg1->reg, filter & 0xffffffff);
2390 wrmsrl(reg1->reg + 1, filter >> 32);
2393 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
2396 static struct intel_uncore_ops hswep_uncore_cbox_ops = {
2397 .init_box = snbep_uncore_msr_init_box,
2398 .disable_box = snbep_uncore_msr_disable_box,
2399 .enable_box = snbep_uncore_msr_enable_box,
2400 .disable_event = snbep_uncore_msr_disable_event,
2401 .enable_event = hswep_cbox_enable_event,
2402 .read_counter = uncore_msr_read_counter,
2403 .hw_config = hswep_cbox_hw_config,
2404 .get_constraint = hswep_cbox_get_constraint,
2405 .put_constraint = snbep_cbox_put_constraint,
2408 static struct intel_uncore_type hswep_uncore_cbox = {
2412 .perf_ctr_bits = 48,
2413 .event_ctl = HSWEP_C0_MSR_PMON_CTL0,
2414 .perf_ctr = HSWEP_C0_MSR_PMON_CTR0,
2415 .event_mask = SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
2416 .box_ctl = HSWEP_C0_MSR_PMON_BOX_CTL,
2417 .msr_offset = HSWEP_CBO_MSR_OFFSET,
2418 .num_shared_regs = 1,
2419 .constraints = hswep_uncore_cbox_constraints,
2420 .ops = &hswep_uncore_cbox_ops,
2421 .format_group = &hswep_uncore_cbox_format_group,
2425 * Write SBOX Initialization register bit by bit to avoid spurious #GPs
2427 static void hswep_uncore_sbox_msr_init_box(struct intel_uncore_box *box)
2429 unsigned msr = uncore_msr_box_ctl(box);
2432 u64 init = SNBEP_PMON_BOX_CTL_INT;
2436 for_each_set_bit(i, (unsigned long *)&init, 64) {
2437 flags |= (1ULL << i);
2443 static struct intel_uncore_ops hswep_uncore_sbox_msr_ops = {
2444 __SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
2445 .init_box = hswep_uncore_sbox_msr_init_box
2448 static struct attribute *hswep_uncore_sbox_formats_attr[] = {
2449 &format_attr_event.attr,
2450 &format_attr_umask.attr,
2451 &format_attr_edge.attr,
2452 &format_attr_tid_en.attr,
2453 &format_attr_inv.attr,
2454 &format_attr_thresh8.attr,
2458 static struct attribute_group hswep_uncore_sbox_format_group = {
2460 .attrs = hswep_uncore_sbox_formats_attr,
2463 static struct intel_uncore_type hswep_uncore_sbox = {
2467 .perf_ctr_bits = 44,
2468 .event_ctl = HSWEP_S0_MSR_PMON_CTL0,
2469 .perf_ctr = HSWEP_S0_MSR_PMON_CTR0,
2470 .event_mask = HSWEP_S_MSR_PMON_RAW_EVENT_MASK,
2471 .box_ctl = HSWEP_S0_MSR_PMON_BOX_CTL,
2472 .msr_offset = HSWEP_SBOX_MSR_OFFSET,
2473 .ops = &hswep_uncore_sbox_msr_ops,
2474 .format_group = &hswep_uncore_sbox_format_group,
2477 static int hswep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event)
2479 struct hw_perf_event *hwc = &event->hw;
2480 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
2481 int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;
2483 if (ev_sel >= 0xb && ev_sel <= 0xe) {
2484 reg1->reg = HSWEP_PCU_MSR_PMON_BOX_FILTER;
2485 reg1->idx = ev_sel - 0xb;
2486 reg1->config = event->attr.config1 & (0xff << reg1->idx);
2491 static struct intel_uncore_ops hswep_uncore_pcu_ops = {
2492 SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
2493 .hw_config = hswep_pcu_hw_config,
2494 .get_constraint = snbep_pcu_get_constraint,
2495 .put_constraint = snbep_pcu_put_constraint,
2498 static struct intel_uncore_type hswep_uncore_pcu = {
2502 .perf_ctr_bits = 48,
2503 .perf_ctr = HSWEP_PCU_MSR_PMON_CTR0,
2504 .event_ctl = HSWEP_PCU_MSR_PMON_CTL0,
2505 .event_mask = SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
2506 .box_ctl = HSWEP_PCU_MSR_PMON_BOX_CTL,
2507 .num_shared_regs = 1,
2508 .ops = &hswep_uncore_pcu_ops,
2509 .format_group = &snbep_uncore_pcu_format_group,
2512 static struct intel_uncore_type *hswep_msr_uncores[] = {
2520 void hswep_uncore_cpu_init(void)
2522 int pkg = topology_phys_to_logical_pkg(0);
2524 if (hswep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
2525 hswep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
2527 /* Detect 6-8 core systems with only two SBOXes */
2528 if (uncore_extra_pci_dev[pkg].dev[HSWEP_PCI_PCU_3]) {
2531 pci_read_config_dword(uncore_extra_pci_dev[pkg].dev[HSWEP_PCI_PCU_3],
2533 if (((capid4 >> 6) & 0x3) == 0)
2534 hswep_uncore_sbox.num_boxes = 2;
2537 uncore_msr_uncores = hswep_msr_uncores;
2540 static struct intel_uncore_type hswep_uncore_ha = {
2544 .perf_ctr_bits = 48,
2545 SNBEP_UNCORE_PCI_COMMON_INIT(),
2548 static struct uncore_event_desc hswep_uncore_imc_events[] = {
2549 INTEL_UNCORE_EVENT_DESC(clockticks, "event=0x00,umask=0x00"),
2550 INTEL_UNCORE_EVENT_DESC(cas_count_read, "event=0x04,umask=0x03"),
2551 INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
2552 INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
2553 INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
2554 INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
2555 INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
2556 { /* end: all zeroes */ },
2559 static struct intel_uncore_type hswep_uncore_imc = {
2563 .perf_ctr_bits = 48,
2564 .fixed_ctr_bits = 48,
2565 .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
2566 .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
2567 .event_descs = hswep_uncore_imc_events,
2568 SNBEP_UNCORE_PCI_COMMON_INIT(),
/* PCI config-space offsets of the four IRP counters (8 bytes apart). */
static unsigned hswep_uncore_irp_ctrs[] = {0xa0, 0xa8, 0xb0, 0xb8};
2573 static u64 hswep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event)
2575 struct pci_dev *pdev = box->pci_dev;
2576 struct hw_perf_event *hwc = &event->hw;
2579 pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx], (u32 *)&count);
2580 pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1);
2585 static struct intel_uncore_ops hswep_uncore_irp_ops = {
2586 .init_box = snbep_uncore_pci_init_box,
2587 .disable_box = snbep_uncore_pci_disable_box,
2588 .enable_box = snbep_uncore_pci_enable_box,
2589 .disable_event = ivbep_uncore_irp_disable_event,
2590 .enable_event = ivbep_uncore_irp_enable_event,
2591 .read_counter = hswep_uncore_irp_read_counter,
2594 static struct intel_uncore_type hswep_uncore_irp = {
2598 .perf_ctr_bits = 48,
2599 .event_mask = SNBEP_PMON_RAW_EVENT_MASK,
2600 .box_ctl = SNBEP_PCI_PMON_BOX_CTL,
2601 .ops = &hswep_uncore_irp_ops,
2602 .format_group = &snbep_uncore_format_group,
2605 static struct intel_uncore_type hswep_uncore_qpi = {
2609 .perf_ctr_bits = 48,
2610 .perf_ctr = SNBEP_PCI_PMON_CTR0,
2611 .event_ctl = SNBEP_PCI_PMON_CTL0,
2612 .event_mask = SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
2613 .box_ctl = SNBEP_PCI_PMON_BOX_CTL,
2614 .num_shared_regs = 1,
2615 .ops = &snbep_uncore_qpi_ops,
2616 .format_group = &snbep_uncore_qpi_format_group,
2619 static struct event_constraint hswep_uncore_r2pcie_constraints[] = {
2620 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
2621 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
2622 UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
2623 UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
2624 UNCORE_EVENT_CONSTRAINT(0x24, 0x1),
2625 UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
2626 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
2627 UNCORE_EVENT_CONSTRAINT(0x27, 0x1),
2628 UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
2629 UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
2630 UNCORE_EVENT_CONSTRAINT(0x2a, 0x1),
2631 UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
2632 UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
2633 UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
2634 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
2635 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
2636 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
2637 UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
2638 EVENT_CONSTRAINT_END
2641 static struct intel_uncore_type hswep_uncore_r2pcie = {
2645 .perf_ctr_bits = 48,
2646 .constraints = hswep_uncore_r2pcie_constraints,
2647 SNBEP_UNCORE_PCI_COMMON_INIT(),
2650 static struct event_constraint hswep_uncore_r3qpi_constraints[] = {
2651 UNCORE_EVENT_CONSTRAINT(0x01, 0x3),
2652 UNCORE_EVENT_CONSTRAINT(0x07, 0x7),
2653 UNCORE_EVENT_CONSTRAINT(0x08, 0x7),
2654 UNCORE_EVENT_CONSTRAINT(0x09, 0x7),
2655 UNCORE_EVENT_CONSTRAINT(0x0a, 0x7),
2656 UNCORE_EVENT_CONSTRAINT(0x0e, 0x7),
2657 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
2658 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
2659 UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
2660 UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
2661 UNCORE_EVENT_CONSTRAINT(0x14, 0x3),
2662 UNCORE_EVENT_CONSTRAINT(0x15, 0x3),
2663 UNCORE_EVENT_CONSTRAINT(0x1f, 0x3),
2664 UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
2665 UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
2666 UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
2667 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
2668 UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
2669 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
2670 UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
2671 UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
2672 UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
2673 UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
2674 UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
2675 UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
2676 UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
2677 UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
2678 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
2679 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
2680 UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
2681 UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
2682 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
2683 UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
2684 EVENT_CONSTRAINT_END
2687 static struct intel_uncore_type hswep_uncore_r3qpi = {
2691 .perf_ctr_bits = 44,
2692 .constraints = hswep_uncore_r3qpi_constraints,
2693 SNBEP_UNCORE_PCI_COMMON_INIT(),
2697 HSWEP_PCI_UNCORE_HA,
2698 HSWEP_PCI_UNCORE_IMC,
2699 HSWEP_PCI_UNCORE_IRP,
2700 HSWEP_PCI_UNCORE_QPI,
2701 HSWEP_PCI_UNCORE_R2PCIE,
2702 HSWEP_PCI_UNCORE_R3QPI,
2705 static struct intel_uncore_type *hswep_pci_uncores[] = {
2706 [HSWEP_PCI_UNCORE_HA] = &hswep_uncore_ha,
2707 [HSWEP_PCI_UNCORE_IMC] = &hswep_uncore_imc,
2708 [HSWEP_PCI_UNCORE_IRP] = &hswep_uncore_irp,
2709 [HSWEP_PCI_UNCORE_QPI] = &hswep_uncore_qpi,
2710 [HSWEP_PCI_UNCORE_R2PCIE] = &hswep_uncore_r2pcie,
2711 [HSWEP_PCI_UNCORE_R3QPI] = &hswep_uncore_r3qpi,
2715 static const struct pci_device_id hswep_uncore_pci_ids[] = {
2716 { /* Home Agent 0 */
2717 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f30),
2718 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 0),
2720 { /* Home Agent 1 */
2721 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f38),
2722 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 1),
2724 { /* MC0 Channel 0 */
2725 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb0),
2726 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 0),
2728 { /* MC0 Channel 1 */
2729 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb1),
2730 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 1),
2732 { /* MC0 Channel 2 */
2733 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb4),
2734 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 2),
2736 { /* MC0 Channel 3 */
2737 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb5),
2738 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 3),
2740 { /* MC1 Channel 0 */
2741 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd0),
2742 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 4),
2744 { /* MC1 Channel 1 */
2745 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd1),
2746 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 5),
2748 { /* MC1 Channel 2 */
2749 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd4),
2750 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 6),
2752 { /* MC1 Channel 3 */
2753 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd5),
2754 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 7),
2757 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f39),
2758 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IRP, 0),
2761 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f32),
2762 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 0),
2765 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f33),
2766 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 1),
2769 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3a),
2770 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 2),
2773 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f34),
2774 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R2PCIE, 0),
2776 { /* R3QPI0 Link 0 */
2777 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f36),
2778 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 0),
2780 { /* R3QPI0 Link 1 */
2781 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f37),
2782 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 1),
2784 { /* R3QPI1 Link 2 */
2785 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3e),
2786 .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 2),
2788 { /* QPI Port 0 filter */
2789 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f86),
2790 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
2791 SNBEP_PCI_QPI_PORT0_FILTER),
2793 { /* QPI Port 1 filter */
2794 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f96),
2795 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
2796 SNBEP_PCI_QPI_PORT1_FILTER),
2798 { /* PCU.3 (for Capability registers) */
2799 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fc0),
2800 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
2803 { /* end: all zeroes */ }
2806 static struct pci_driver hswep_uncore_pci_driver = {
2807 .name = "hswep_uncore",
2808 .id_table = hswep_uncore_pci_ids,
2811 int hswep_uncore_pci_init(void)
2813 int ret = snbep_pci2phy_map_init(0x2f1e);
2816 uncore_pci_uncores = hswep_pci_uncores;
2817 uncore_pci_driver = &hswep_uncore_pci_driver;
2820 /* end of Haswell-EP uncore support */
2822 /* BDX uncore support */
2824 static struct intel_uncore_type bdx_uncore_ubox = {
2828 .perf_ctr_bits = 48,
2829 .fixed_ctr_bits = 48,
2830 .perf_ctr = HSWEP_U_MSR_PMON_CTR0,
2831 .event_ctl = HSWEP_U_MSR_PMON_CTL0,
2832 .event_mask = SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
2833 .fixed_ctr = HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
2834 .fixed_ctl = HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
2835 .num_shared_regs = 1,
2836 .ops = &ivbep_uncore_msr_ops,
2837 .format_group = &ivbep_uncore_ubox_format_group,
2840 static struct event_constraint bdx_uncore_cbox_constraints[] = {
2841 UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
2842 UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
2843 UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
2844 UNCORE_EVENT_CONSTRAINT(0x3e, 0x1),
2845 EVENT_CONSTRAINT_END
2848 static struct intel_uncore_type bdx_uncore_cbox = {
2852 .perf_ctr_bits = 48,
2853 .event_ctl = HSWEP_C0_MSR_PMON_CTL0,
2854 .perf_ctr = HSWEP_C0_MSR_PMON_CTR0,
2855 .event_mask = SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
2856 .box_ctl = HSWEP_C0_MSR_PMON_BOX_CTL,
2857 .msr_offset = HSWEP_CBO_MSR_OFFSET,
2858 .num_shared_regs = 1,
2859 .constraints = bdx_uncore_cbox_constraints,
2860 .ops = &hswep_uncore_cbox_ops,
2861 .format_group = &hswep_uncore_cbox_format_group,
2864 static struct intel_uncore_type bdx_uncore_sbox = {
2868 .perf_ctr_bits = 48,
2869 .event_ctl = HSWEP_S0_MSR_PMON_CTL0,
2870 .perf_ctr = HSWEP_S0_MSR_PMON_CTR0,
2871 .event_mask = HSWEP_S_MSR_PMON_RAW_EVENT_MASK,
2872 .box_ctl = HSWEP_S0_MSR_PMON_BOX_CTL,
2873 .msr_offset = HSWEP_SBOX_MSR_OFFSET,
2874 .ops = &hswep_uncore_sbox_msr_ops,
2875 .format_group = &hswep_uncore_sbox_format_group,
2878 #define BDX_MSR_UNCORE_SBOX 3
2880 static struct intel_uncore_type *bdx_msr_uncores[] = {
2888 void bdx_uncore_cpu_init(void)
2890 if (bdx_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
2891 bdx_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
2892 uncore_msr_uncores = bdx_msr_uncores;
2894 /* BDX-DE doesn't have SBOX */
2895 if (boot_cpu_data.x86_model == 86)
2896 uncore_msr_uncores[BDX_MSR_UNCORE_SBOX] = NULL;
2899 static struct intel_uncore_type bdx_uncore_ha = {
2903 .perf_ctr_bits = 48,
2904 SNBEP_UNCORE_PCI_COMMON_INIT(),
2907 static struct intel_uncore_type bdx_uncore_imc = {
2911 .perf_ctr_bits = 48,
2912 .fixed_ctr_bits = 48,
2913 .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
2914 .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
2915 .event_descs = hswep_uncore_imc_events,
2916 SNBEP_UNCORE_PCI_COMMON_INIT(),
2919 static struct intel_uncore_type bdx_uncore_irp = {
2923 .perf_ctr_bits = 48,
2924 .event_mask = SNBEP_PMON_RAW_EVENT_MASK,
2925 .box_ctl = SNBEP_PCI_PMON_BOX_CTL,
2926 .ops = &hswep_uncore_irp_ops,
2927 .format_group = &snbep_uncore_format_group,
2930 static struct intel_uncore_type bdx_uncore_qpi = {
2934 .perf_ctr_bits = 48,
2935 .perf_ctr = SNBEP_PCI_PMON_CTR0,
2936 .event_ctl = SNBEP_PCI_PMON_CTL0,
2937 .event_mask = SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
2938 .box_ctl = SNBEP_PCI_PMON_BOX_CTL,
2939 .num_shared_regs = 1,
2940 .ops = &snbep_uncore_qpi_ops,
2941 .format_group = &snbep_uncore_qpi_format_group,
2944 static struct event_constraint bdx_uncore_r2pcie_constraints[] = {
2945 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
2946 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
2947 UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
2948 UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
2949 UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
2950 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
2951 UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
2952 UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
2953 UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
2954 EVENT_CONSTRAINT_END
2957 static struct intel_uncore_type bdx_uncore_r2pcie = {
2961 .perf_ctr_bits = 48,
2962 .constraints = bdx_uncore_r2pcie_constraints,
2963 SNBEP_UNCORE_PCI_COMMON_INIT(),
2966 static struct event_constraint bdx_uncore_r3qpi_constraints[] = {
2967 UNCORE_EVENT_CONSTRAINT(0x01, 0x7),
2968 UNCORE_EVENT_CONSTRAINT(0x07, 0x7),
2969 UNCORE_EVENT_CONSTRAINT(0x08, 0x7),
2970 UNCORE_EVENT_CONSTRAINT(0x09, 0x7),
2971 UNCORE_EVENT_CONSTRAINT(0x0a, 0x7),
2972 UNCORE_EVENT_CONSTRAINT(0x0e, 0x7),
2973 UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
2974 UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
2975 UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
2976 UNCORE_EVENT_CONSTRAINT(0x14, 0x3),
2977 UNCORE_EVENT_CONSTRAINT(0x15, 0x3),
2978 UNCORE_EVENT_CONSTRAINT(0x1f, 0x3),
2979 UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
2980 UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
2981 UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
2982 UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
2983 UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
2984 UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
2985 UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
2986 UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
2987 UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
2988 UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
2989 UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
2990 UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
2991 UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
2992 UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
2993 UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
2994 UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
2995 UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
2996 UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
2997 EVENT_CONSTRAINT_END
3000 static struct intel_uncore_type bdx_uncore_r3qpi = {
3004 .perf_ctr_bits = 48,
3005 .constraints = bdx_uncore_r3qpi_constraints,
3006 SNBEP_UNCORE_PCI_COMMON_INIT(),
3014 BDX_PCI_UNCORE_R2PCIE,
3015 BDX_PCI_UNCORE_R3QPI,
3018 static struct intel_uncore_type *bdx_pci_uncores[] = {
3019 [BDX_PCI_UNCORE_HA] = &bdx_uncore_ha,
3020 [BDX_PCI_UNCORE_IMC] = &bdx_uncore_imc,
3021 [BDX_PCI_UNCORE_IRP] = &bdx_uncore_irp,
3022 [BDX_PCI_UNCORE_QPI] = &bdx_uncore_qpi,
3023 [BDX_PCI_UNCORE_R2PCIE] = &bdx_uncore_r2pcie,
3024 [BDX_PCI_UNCORE_R3QPI] = &bdx_uncore_r3qpi,
3028 static const struct pci_device_id bdx_uncore_pci_ids[] = {
3029 { /* Home Agent 0 */
3030 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f30),
3031 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_HA, 0),
3033 { /* Home Agent 1 */
3034 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f38),
3035 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_HA, 1),
3037 { /* MC0 Channel 0 */
3038 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb0),
3039 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 0),
3041 { /* MC0 Channel 1 */
3042 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb1),
3043 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 1),
3045 { /* MC0 Channel 2 */
3046 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb4),
3047 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 2),
3049 { /* MC0 Channel 3 */
3050 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb5),
3051 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 3),
3053 { /* MC1 Channel 0 */
3054 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd0),
3055 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 4),
3057 { /* MC1 Channel 1 */
3058 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd1),
3059 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 5),
3061 { /* MC1 Channel 2 */
3062 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd4),
3063 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 6),
3065 { /* MC1 Channel 3 */
3066 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd5),
3067 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 7),
3070 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f39),
3071 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IRP, 0),
3074 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f32),
3075 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_QPI, 0),
3078 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f33),
3079 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_QPI, 1),
3082 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f3a),
3083 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_QPI, 2),
3086 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f34),
3087 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R2PCIE, 0),
3089 { /* R3QPI0 Link 0 */
3090 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f36),
3091 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R3QPI, 0),
3093 { /* R3QPI0 Link 1 */
3094 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f37),
3095 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R3QPI, 1),
3097 { /* R3QPI1 Link 2 */
3098 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f3e),
3099 .driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R3QPI, 2),
3101 { /* QPI Port 0 filter */
3102 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f86),
3103 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, 0),
3105 { /* QPI Port 1 filter */
3106 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f96),
3107 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, 1),
3109 { /* QPI Port 2 filter */
3110 PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f46),
3111 .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, 2),
3113 { /* end: all zeroes */ }
3116 static struct pci_driver bdx_uncore_pci_driver = {
3117 .name = "bdx_uncore",
3118 .id_table = bdx_uncore_pci_ids,
3121 int bdx_uncore_pci_init(void)
3123 int ret = snbep_pci2phy_map_init(0x6f1e);
3127 uncore_pci_uncores = bdx_pci_uncores;
3128 uncore_pci_driver = &bdx_uncore_pci_driver;
3132 /* end of BDX uncore support */