/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Netburst Performance Events (P4, old Xeon)
 */

#ifndef PERF_EVENT_P4_H
#define PERF_EVENT_P4_H

#include <linux/cpu.h>
#include <linux/bitops.h>

/*
 * NetBurst has performance MSRs shared between
 * threads if HT is turned on, i.e. for both logical
 * processors (note: on Atom with HT support the
 * perf-MSRs are not shared and every thread has its
 * own set of perf-MSRs)
 */
#define ARCH_P4_TOTAL_ESCR	(46)
#define ARCH_P4_RESERVED_ESCR	(2) /* IQ_ESCR(0,1) not always present */
#define ARCH_P4_MAX_ESCR	(ARCH_P4_TOTAL_ESCR - ARCH_P4_RESERVED_ESCR)
#define ARCH_P4_MAX_CCCR	(18)

#define ARCH_P4_CNTRVAL_BITS	(40)
#define ARCH_P4_CNTRVAL_MASK	((1ULL << ARCH_P4_CNTRVAL_BITS) - 1)
#define ARCH_P4_UNFLAGGED_BIT	((1ULL) << (ARCH_P4_CNTRVAL_BITS - 1))

#define P4_ESCR_EVENT_MASK	0x7e000000ULL
#define P4_ESCR_EVENT_SHIFT	25
#define P4_ESCR_EVENTMASK_MASK	0x01fffe00ULL
#define P4_ESCR_EVENTMASK_SHIFT	9
#define P4_ESCR_TAG_MASK	0x000001e0ULL
#define P4_ESCR_TAG_SHIFT	5
#define P4_ESCR_TAG_ENABLE	0x00000010ULL
#define P4_ESCR_T0_OS		0x00000008ULL
#define P4_ESCR_T0_USR		0x00000004ULL
#define P4_ESCR_T1_OS		0x00000002ULL
#define P4_ESCR_T1_USR		0x00000001ULL

#define P4_ESCR_EVENT(v)	((v) << P4_ESCR_EVENT_SHIFT)
#define P4_ESCR_EMASK(v)	((v) << P4_ESCR_EVENTMASK_SHIFT)
#define P4_ESCR_TAG(v)		((v) << P4_ESCR_TAG_SHIFT)

#define P4_CCCR_OVF			0x80000000ULL
#define P4_CCCR_CASCADE			0x40000000ULL
#define P4_CCCR_OVF_PMI_T0		0x04000000ULL
#define P4_CCCR_OVF_PMI_T1		0x08000000ULL
#define P4_CCCR_FORCE_OVF		0x02000000ULL
#define P4_CCCR_EDGE			0x01000000ULL
#define P4_CCCR_THRESHOLD_MASK		0x00f00000ULL
#define P4_CCCR_THRESHOLD_SHIFT		20
#define P4_CCCR_COMPLEMENT		0x00080000ULL
#define P4_CCCR_COMPARE			0x00040000ULL
#define P4_CCCR_ESCR_SELECT_MASK	0x0000e000ULL
#define P4_CCCR_ESCR_SELECT_SHIFT	13
#define P4_CCCR_ENABLE			0x00001000ULL
#define P4_CCCR_THREAD_SINGLE		0x00010000ULL
#define P4_CCCR_THREAD_BOTH		0x00020000ULL
#define P4_CCCR_THREAD_ANY		0x00030000ULL
#define P4_CCCR_RESERVED		0x00000fffULL

#define P4_CCCR_THRESHOLD(v)		((v) << P4_CCCR_THRESHOLD_SHIFT)
#define P4_CCCR_ESEL(v)			((v) << P4_CCCR_ESCR_SELECT_SHIFT)

#define P4_GEN_ESCR_EMASK(class, name, bit) \
	class##__##name = ((1ULL << bit) << P4_ESCR_EVENTMASK_SHIFT)
#define P4_ESCR_EMASK_BIT(class, name) class##__##name

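/*
 * For illustration only (not part of the programming interface): the
 * generator above simply places the event-mask bit into the ESCR
 * EventMask field, so e.g.
 *
 *	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, DD, 0)
 *
 * defines P4_EVENT_TC_DELIVER_MODE__DD as ((1ULL << 0) << 9) == 0x200,
 * which a caller later retrieves with
 * P4_ESCR_EMASK_BIT(P4_EVENT_TC_DELIVER_MODE, DD).
 */
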
/*
 * The config field is 64 bits wide and consists of
 * HT << 63 | ESCR << 32 | CCCR
 * where HT is the HyperThreading bit (ESCR has this
 * bit reserved so we may use it for our own purpose)
 *
 * note that these are NOT the addresses of the respective
 * ESCR and CCCR registers but rather a packed value which
 * has to be unpacked and written to the proper addresses
 *
 * the basic idea is to pack as much info as possible
 */
#define p4_config_pack_escr(v)		(((u64)(v)) << 32)
#define p4_config_pack_cccr(v)		(((u64)(v)) & 0xffffffffULL)
#define p4_config_unpack_escr(v)	(((u64)(v)) >> 32)
#define p4_config_unpack_cccr(v)	(((u64)(v)) & 0xffffffffULL)

#define p4_config_unpack_emask(v)			\
	({						\
		u32 t = p4_config_unpack_escr((v));	\
		t = t & P4_ESCR_EVENTMASK_MASK;		\
		t = t >> P4_ESCR_EVENTMASK_SHIFT;	\
		t;					\
	})

#define p4_config_unpack_event(v)			\
	({						\
		u32 t = p4_config_unpack_escr((v));	\
		t = t & P4_ESCR_EVENT_MASK;		\
		t = t >> P4_ESCR_EVENT_SHIFT;		\
		t;					\
	})

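/*
 * Illustrative sketch only (the values below are made up, not a
 * recommended configuration): an ESCR/CCCR pair is packed into one
 * u64 and the event can later be recovered from the high half, e.g.
 *
 *	u64 config = p4_config_pack_escr(P4_ESCR_EVENT(0x3)	|
 *					 P4_ESCR_EMASK(0x1))	|
 *		     p4_config_pack_cccr(P4_CCCR_ESEL(0x4)	|
 *					 P4_CCCR_ENABLE);
 *
 *	p4_config_unpack_event(config) == 0x3
 *	p4_config_unpack_emask(config) == 0x1
 */
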
#define P4_CONFIG_HT_SHIFT		63
#define P4_CONFIG_HT			(1ULL << P4_CONFIG_HT_SHIFT)

/*
 * If an event has an alias it should be marked
 * with a special bit. (Don't forget to check
 * P4_PEBS_CONFIG_MASK and related bits on
 * modification.)
 */
#define P4_CONFIG_ALIASABLE		(1ULL << 9)

/*
 * The bits we allow to pass for RAW events
 */
#define P4_CONFIG_MASK_ESCR		\
	P4_ESCR_EVENT_MASK	|	\
	P4_ESCR_EVENTMASK_MASK	|	\
	P4_ESCR_TAG_MASK	|	\
	P4_ESCR_TAG_ENABLE

#define P4_CONFIG_MASK_CCCR		\
	P4_CCCR_EDGE		|	\
	P4_CCCR_THRESHOLD_MASK	|	\
	P4_CCCR_COMPLEMENT	|	\
	P4_CCCR_COMPARE		|	\
	P4_CCCR_THREAD_ANY	|	\
	P4_CCCR_RESERVED

/* some dangerous bits are reserved for kernel internals */
#define P4_CONFIG_MASK					\
	(p4_config_pack_escr(P4_CONFIG_MASK_ESCR)) |	\
	(p4_config_pack_cccr(P4_CONFIG_MASK_CCCR))

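/*
 * Sketch of the intent only (not the literal validation code): a raw
 * config supplied from userspace is expected to be reduced to the
 * allowed bits roughly like
 *
 *	attr->config &= P4_CONFIG_MASK;
 *
 * so that the "dangerous" CCCR/ESCR bits (overflow, PMI routing,
 * thread selection and so on) stay under kernel control.
 */
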
/*
 * In case of event aliasing we need to preserve some
 * caller bits, otherwise the mapping won't be complete.
 */
#define P4_CONFIG_EVENT_ALIAS_MASK			  \
	(p4_config_pack_escr(P4_CONFIG_MASK_ESCR)	| \
	 p4_config_pack_cccr(P4_CCCR_EDGE		| \
			     P4_CCCR_THRESHOLD_MASK	| \
			     P4_CCCR_COMPLEMENT		| \
			     P4_CCCR_COMPARE))

#define P4_CONFIG_EVENT_ALIAS_IMMUTABLE_BITS		  \
	((P4_CONFIG_HT)					| \
	 p4_config_pack_escr(P4_ESCR_T0_OS		| \
			     P4_ESCR_T0_USR		| \
			     P4_ESCR_T1_OS		| \
			     P4_ESCR_T1_USR)		| \
	 p4_config_pack_cccr(P4_CCCR_OVF		| \
			     P4_CCCR_CASCADE		| \
			     P4_CCCR_FORCE_OVF		| \
			     P4_CCCR_THREAD_ANY		| \
			     P4_CCCR_OVF_PMI_T0		| \
			     P4_CCCR_OVF_PMI_T1		| \
			     P4_CONFIG_ALIASABLE))

static inline bool p4_is_event_cascaded(u64 config)
{
	u32 cccr = p4_config_unpack_cccr(config);
	return !!(cccr & P4_CCCR_CASCADE);
}

static inline int p4_ht_config_thread(u64 config)
{
	return !!(config & P4_CONFIG_HT);
}

static inline u64 p4_set_ht_bit(u64 config)
{
	return config | P4_CONFIG_HT;
}

static inline u64 p4_clear_ht_bit(u64 config)
{
	return config & ~P4_CONFIG_HT;
}

static inline int p4_ht_active(void)
{
#ifdef CONFIG_SMP
	return smp_num_siblings > 1;
#endif
	return 0;
}

static inline int p4_ht_thread(int cpu)
{
#ifdef CONFIG_SMP
	if (smp_num_siblings == 2)
		return cpu != cpumask_first(this_cpu_cpumask_var_ptr(cpu_sibling_map));
#endif
	return 0;
}

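/*
 * The HT bit stored in the config records which logical thread the
 * event was originally configured for; when that differs from the
 * thread the event is currently running on, the T0/T1 selectors in
 * the ESCR/CCCR need to be swapped.
 */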
static inline int p4_should_swap_ts(u64 config, int cpu)
{
	return p4_ht_config_thread(config) ^ p4_ht_thread(cpu);
}

static inline u32 p4_default_cccr_conf(int cpu)
{
	/*
	 * Note that P4_CCCR_THREAD_ANY is "required" on
	 * non-HT machines (on HT machines we count TS events
	 * regardless of the state of the second logical processor)
	 */
	u32 cccr = P4_CCCR_THREAD_ANY;

	if (!p4_ht_thread(cpu))
		cccr |= P4_CCCR_OVF_PMI_T0;
	else
		cccr |= P4_CCCR_OVF_PMI_T1;

	return cccr;
}

static inline u32 p4_default_escr_conf(int cpu, int exclude_os, int exclude_usr)
{
	u32 escr = 0;

	if (!p4_ht_thread(cpu)) {
		if (!exclude_os)
			escr |= P4_ESCR_T0_OS;
		if (!exclude_usr)
			escr |= P4_ESCR_T0_USR;
	} else {
		if (!exclude_os)
			escr |= P4_ESCR_T1_OS;
		if (!exclude_usr)
			escr |= P4_ESCR_T1_USR;
	}

	return escr;
}

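/*
 * Purely illustrative usage (not taken verbatim from the driver): the
 * two helpers above give the per-thread defaults which can then be
 * packed into a single config, e.g.
 *
 *	u64 config = p4_config_pack_escr(p4_default_escr_conf(cpu, 0, 0)) |
 *		     p4_config_pack_cccr(p4_default_cccr_conf(cpu));
 */
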
/*
 * These are the events which should be used in the "Event Select"
 * field of the ESCR register; they are like unique keys which allow
 * the kernel to determine which CCCR and COUNTER should be
 * used to track an event
 */
enum P4_EVENTS {
	P4_EVENT_TC_DELIVER_MODE,
	P4_EVENT_BPU_FETCH_REQUEST,
	P4_EVENT_ITLB_REFERENCE,
	P4_EVENT_MEMORY_CANCEL,
	P4_EVENT_MEMORY_COMPLETE,
	P4_EVENT_LOAD_PORT_REPLAY,
	P4_EVENT_STORE_PORT_REPLAY,
	P4_EVENT_MOB_LOAD_REPLAY,
	P4_EVENT_PAGE_WALK_TYPE,
	P4_EVENT_BSQ_CACHE_REFERENCE,
	P4_EVENT_IOQ_ALLOCATION,
	P4_EVENT_IOQ_ACTIVE_ENTRIES,
	P4_EVENT_FSB_DATA_ACTIVITY,
	P4_EVENT_BSQ_ALLOCATION,
	P4_EVENT_BSQ_ACTIVE_ENTRIES,
	P4_EVENT_SSE_INPUT_ASSIST,
	P4_EVENT_PACKED_SP_UOP,
	P4_EVENT_PACKED_DP_UOP,
	P4_EVENT_SCALAR_SP_UOP,
	P4_EVENT_SCALAR_DP_UOP,
	P4_EVENT_64BIT_MMX_UOP,
	P4_EVENT_128BIT_MMX_UOP,
	P4_EVENT_X87_FP_UOP,
	P4_EVENT_TC_MISC,
	P4_EVENT_GLOBAL_POWER_EVENTS,
	P4_EVENT_TC_MS_XFER,
	P4_EVENT_UOP_QUEUE_WRITES,
	P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE,
	P4_EVENT_RETIRED_BRANCH_TYPE,
	P4_EVENT_RESOURCE_STALL,
	P4_EVENT_WC_BUFFER,
	P4_EVENT_B2B_CYCLES,
	P4_EVENT_BNR,
	P4_EVENT_SNOOP,
	P4_EVENT_RESPONSE,
	P4_EVENT_FRONT_END_EVENT,
	P4_EVENT_EXECUTION_EVENT,
	P4_EVENT_REPLAY_EVENT,
	P4_EVENT_INSTR_RETIRED,
	P4_EVENT_UOPS_RETIRED,
	P4_EVENT_UOP_TYPE,
	P4_EVENT_BRANCH_RETIRED,
	P4_EVENT_MISPRED_BRANCH_RETIRED,
	P4_EVENT_X87_ASSIST,
	P4_EVENT_MACHINE_CLEAR,
	P4_EVENT_INSTR_COMPLETED,
};

#define P4_OPCODE(event)		event##_OPCODE
#define P4_OPCODE_ESEL(opcode)		((opcode & 0x00ff) >> 0)
#define P4_OPCODE_EVNT(opcode)		((opcode & 0xff00) >> 8)
#define P4_OPCODE_PACK(event, sel)	(((event) << 8) | sel)

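/*
 * For illustration only: an opcode packs the ESCR event code into the
 * high byte and the CCCR ESCR-select into the low byte, so e.g. for
 * P4_EVENT_TC_DELIVER_MODE below
 *
 *	P4_OPCODE_PACK(0x01, 0x01)	== 0x0101
 *	P4_OPCODE_EVNT(0x0101)		== 0x01	(ESCR Event Select code)
 *	P4_OPCODE_ESEL(0x0101)		== 0x01	(CCCR ESCR select)
 */
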
/*
 * The comments below each event list the ESCR restriction
 * for that event and the counter indices usable per ESCR
 *
 * MSR_P4_IQ_ESCR0 and MSR_P4_IQ_ESCR1 are available only on early
 * processor builds (family 0FH, models 01H-02H). These MSRs
 * are not available on later versions, so we don't use
 * them at all
 *
 * Also note that CCCR1 does not have the P4_CCCR_ENABLE bit working
 * properly, so as a result we should not use that CCCR and its
 * respective counter
 */
enum P4_EVENT_OPCODES {
	P4_OPCODE(P4_EVENT_TC_DELIVER_MODE)		= P4_OPCODE_PACK(0x01, 0x01),
	/*
	 * MSR_P4_TC_ESCR0:	4, 5
	 * MSR_P4_TC_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_BPU_FETCH_REQUEST)		= P4_OPCODE_PACK(0x03, 0x00),
	/*
	 * MSR_P4_BPU_ESCR0:	0, 1
	 * MSR_P4_BPU_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_ITLB_REFERENCE)		= P4_OPCODE_PACK(0x18, 0x03),
	/*
	 * MSR_P4_ITLB_ESCR0:	0, 1
	 * MSR_P4_ITLB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_MEMORY_CANCEL)		= P4_OPCODE_PACK(0x02, 0x05),
	/*
	 * MSR_P4_DAC_ESCR0:	8, 9
	 * MSR_P4_DAC_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_MEMORY_COMPLETE)		= P4_OPCODE_PACK(0x08, 0x02),
	/*
	 * MSR_P4_SAAT_ESCR0:	8, 9
	 * MSR_P4_SAAT_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_LOAD_PORT_REPLAY)		= P4_OPCODE_PACK(0x04, 0x02),
	/*
	 * MSR_P4_SAAT_ESCR0:	8, 9
	 * MSR_P4_SAAT_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_STORE_PORT_REPLAY)		= P4_OPCODE_PACK(0x05, 0x02),
	/*
	 * MSR_P4_SAAT_ESCR0:	8, 9
	 * MSR_P4_SAAT_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_MOB_LOAD_REPLAY)		= P4_OPCODE_PACK(0x03, 0x02),
	/*
	 * MSR_P4_MOB_ESCR0:	0, 1
	 * MSR_P4_MOB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_PAGE_WALK_TYPE)		= P4_OPCODE_PACK(0x01, 0x04),
	/*
	 * MSR_P4_PMH_ESCR0:	0, 1
	 * MSR_P4_PMH_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_BSQ_CACHE_REFERENCE)		= P4_OPCODE_PACK(0x0c, 0x07),
	/*
	 * MSR_P4_BSU_ESCR0:	0, 1
	 * MSR_P4_BSU_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_IOQ_ALLOCATION)		= P4_OPCODE_PACK(0x03, 0x06),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_IOQ_ACTIVE_ENTRIES)		= P4_OPCODE_PACK(0x1a, 0x06),
	/*
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_FSB_DATA_ACTIVITY)		= P4_OPCODE_PACK(0x17, 0x06),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_BSQ_ALLOCATION)		= P4_OPCODE_PACK(0x05, 0x07),
	/*
	 * MSR_P4_BSU_ESCR0:	0, 1
	 */

	P4_OPCODE(P4_EVENT_BSQ_ACTIVE_ENTRIES)		= P4_OPCODE_PACK(0x06, 0x07),
	/*
	 * NOTE: no ESCR name in docs, it's guessed
	 * MSR_P4_BSU_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_SSE_INPUT_ASSIST)		= P4_OPCODE_PACK(0x34, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_PACKED_SP_UOP)		= P4_OPCODE_PACK(0x08, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_PACKED_DP_UOP)		= P4_OPCODE_PACK(0x0c, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_SCALAR_SP_UOP)		= P4_OPCODE_PACK(0x0a, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_SCALAR_DP_UOP)		= P4_OPCODE_PACK(0x0e, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_64BIT_MMX_UOP)		= P4_OPCODE_PACK(0x02, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_128BIT_MMX_UOP)		= P4_OPCODE_PACK(0x1a, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_X87_FP_UOP)			= P4_OPCODE_PACK(0x04, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_TC_MISC)			= P4_OPCODE_PACK(0x06, 0x01),
	/*
	 * MSR_P4_TC_ESCR0:	4, 5
	 * MSR_P4_TC_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_GLOBAL_POWER_EVENTS)		= P4_OPCODE_PACK(0x13, 0x06),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_TC_MS_XFER)			= P4_OPCODE_PACK(0x05, 0x00),
	/*
	 * MSR_P4_MS_ESCR0:	4, 5
	 * MSR_P4_MS_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_UOP_QUEUE_WRITES)		= P4_OPCODE_PACK(0x09, 0x00),
	/*
	 * MSR_P4_MS_ESCR0:	4, 5
	 * MSR_P4_MS_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE)	= P4_OPCODE_PACK(0x05, 0x02),
	/*
	 * MSR_P4_TBPU_ESCR0:	4, 5
	 * MSR_P4_TBPU_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_RETIRED_BRANCH_TYPE)		= P4_OPCODE_PACK(0x04, 0x02),
	/*
	 * MSR_P4_TBPU_ESCR0:	4, 5
	 * MSR_P4_TBPU_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_RESOURCE_STALL)		= P4_OPCODE_PACK(0x01, 0x01),
	/*
	 * MSR_P4_ALF_ESCR0:	12, 13, 16
	 * MSR_P4_ALF_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_WC_BUFFER)			= P4_OPCODE_PACK(0x05, 0x05),
	/*
	 * MSR_P4_DAC_ESCR0:	8, 9
	 * MSR_P4_DAC_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_B2B_CYCLES)			= P4_OPCODE_PACK(0x16, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_BNR)				= P4_OPCODE_PACK(0x08, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_SNOOP)			= P4_OPCODE_PACK(0x06, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_RESPONSE)			= P4_OPCODE_PACK(0x04, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_FRONT_END_EVENT)		= P4_OPCODE_PACK(0x08, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_EXECUTION_EVENT)		= P4_OPCODE_PACK(0x0c, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_REPLAY_EVENT)		= P4_OPCODE_PACK(0x09, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_INSTR_RETIRED)		= P4_OPCODE_PACK(0x02, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_UOPS_RETIRED)		= P4_OPCODE_PACK(0x01, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_UOP_TYPE)			= P4_OPCODE_PACK(0x02, 0x02),
	/*
	 * MSR_P4_RAT_ESCR0:	12, 13, 16
	 * MSR_P4_RAT_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_BRANCH_RETIRED)		= P4_OPCODE_PACK(0x06, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_MISPRED_BRANCH_RETIRED)	= P4_OPCODE_PACK(0x03, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_X87_ASSIST)			= P4_OPCODE_PACK(0x03, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_MACHINE_CLEAR)		= P4_OPCODE_PACK(0x02, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_INSTR_COMPLETED)		= P4_OPCODE_PACK(0x07, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */
};

/*
 * a caller should use the P4_ESCR_EMASK_BIT helper to
 * pick the EventMask needed, for example
 *
 *	P4_ESCR_EMASK_BIT(P4_EVENT_TC_DELIVER_MODE, DD)
 */
enum P4_ESCR_EMASKS {
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, DD, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, DB, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, DI, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, BD, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, BB, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, BI, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, ID, 6),

	P4_GEN_ESCR_EMASK(P4_EVENT_BPU_FETCH_REQUEST, TCMISS, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_ITLB_REFERENCE, HIT, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_ITLB_REFERENCE, MISS, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_ITLB_REFERENCE, HIT_UK, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_CANCEL, ST_RB_FULL, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_CANCEL, 64K_CONF, 3),

	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_COMPLETE, LSC, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_COMPLETE, SSC, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_LOAD_PORT_REPLAY, SPLIT_LD, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_STORE_PORT_REPLAY, SPLIT_ST, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, NO_STA, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, NO_STD, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, PARTIAL_DATA, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, UNALGN_ADDR, 5),

	P4_GEN_ESCR_EMASK(P4_EVENT_PAGE_WALK_TYPE, DTMISS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_PAGE_WALK_TYPE, ITMISS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_HITS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_HITE, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_HITM, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_HITS, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_HITE, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_HITM, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_MISS, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_MISS, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, WR_2ndL_MISS, 10),

	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, DEFAULT, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, ALL_READ, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, ALL_WRITE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_UC, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WC, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WT, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WP, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WB, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, OWN, 13),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, OTHER, 14),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, PREFETCH, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, DEFAULT, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, ALL_READ, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, ALL_WRITE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_UC, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WC, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WT, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WP, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WB, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, OWN, 13),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, OTHER, 14),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, PREFETCH, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DRDY_DRV, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DRDY_OWN, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DRDY_OTHER, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DBSY_DRV, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DBSY_OWN, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DBSY_OTHER, 5),

	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_TYPE0, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_TYPE1, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_LEN0, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_LEN1, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_IO_TYPE, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_LOCK_TYPE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_CACHE_TYPE, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_SPLIT_TYPE, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_DEM_TYPE, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_ORD_TYPE, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, MEM_TYPE0, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, MEM_TYPE1, 12),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, MEM_TYPE2, 13),

	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_TYPE0, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_TYPE1, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_LEN0, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_LEN1, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_IO_TYPE, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_LOCK_TYPE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_CACHE_TYPE, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_SPLIT_TYPE, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_DEM_TYPE, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_ORD_TYPE, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, MEM_TYPE0, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, MEM_TYPE1, 12),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, MEM_TYPE2, 13),

	P4_GEN_ESCR_EMASK(P4_EVENT_SSE_INPUT_ASSIST, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_PACKED_SP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_PACKED_DP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_SCALAR_SP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_SCALAR_DP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_64BIT_MMX_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_128BIT_MMX_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_X87_FP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_TC_MISC, FLUSH, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_GLOBAL_POWER_EVENTS, RUNNING, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_TC_MS_XFER, CISC, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_QUEUE_WRITES, FROM_TC_BUILD, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_QUEUE_WRITES, FROM_TC_DELIVER, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_QUEUE_WRITES, FROM_ROM, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, CONDITIONAL, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, CALL, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, RETURN, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, INDIRECT, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, CONDITIONAL, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, CALL, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, RETURN, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, INDIRECT, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_RESOURCE_STALL, SBFULL, 5),

	P4_GEN_ESCR_EMASK(P4_EVENT_WC_BUFFER, WCB_EVICTS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_WC_BUFFER, WCB_FULL_EVICTS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_FRONT_END_EVENT, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_FRONT_END_EVENT, BOGUS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS0, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS1, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS2, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS3, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS0, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS1, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS2, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS3, 7),

	P4_GEN_ESCR_EMASK(P4_EVENT_REPLAY_EVENT, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_REPLAY_EVENT, BOGUS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, NBOGUSNTAG, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, NBOGUSTAG, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, BOGUSNTAG, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, BOGUSTAG, 3),

	P4_GEN_ESCR_EMASK(P4_EVENT_UOPS_RETIRED, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOPS_RETIRED, BOGUS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_TYPE, TAGLOADS, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_TYPE, TAGSTORES, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMNP, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMNM, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMTP, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMTM, 3),

	P4_GEN_ESCR_EMASK(P4_EVENT_MISPRED_BRANCH_RETIRED, NBOGUS, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, FPSU, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, FPSO, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, POAO, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, POAU, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, PREA, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_MACHINE_CLEAR, CLEAR, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_MACHINE_CLEAR, MOCLEAR, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_MACHINE_CLEAR, SMCLEAR, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_COMPLETED, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_COMPLETED, BOGUS, 1),
};

/*
 * Note we have UOP and PEBS bits reserved for now,
 * just in case we need them in the future
 */
#define P4_PEBS_CONFIG_ENABLE		(1ULL << 7)
#define P4_PEBS_CONFIG_UOP_TAG		(1ULL << 8)
#define P4_PEBS_CONFIG_METRIC_MASK	0x3FLL
#define P4_PEBS_CONFIG_MASK		0xFFLL

/*
 * note: only counters MSR_IQ_COUNTER4 (16) and
 * MSR_IQ_COUNTER5 (17) are allowed for PEBS sampling
 */
#define P4_PEBS_ENABLE			0x02000000ULL
#define P4_PEBS_ENABLE_UOP_TAG		0x01000000ULL

#define p4_config_unpack_metric(v)	(((u64)(v)) & P4_PEBS_CONFIG_METRIC_MASK)
#define p4_config_unpack_pebs(v)	(((u64)(v)) & P4_PEBS_CONFIG_MASK)

#define p4_config_pebs_has(v, mask)	(p4_config_unpack_pebs(v) & (mask))

enum P4_PEBS_METRIC {
	P4_PEBS_METRIC__none,

	P4_PEBS_METRIC__1stl_cache_load_miss_retired,
	P4_PEBS_METRIC__2ndl_cache_load_miss_retired,
	P4_PEBS_METRIC__dtlb_load_miss_retired,
	P4_PEBS_METRIC__dtlb_store_miss_retired,
	P4_PEBS_METRIC__dtlb_all_miss_retired,
	P4_PEBS_METRIC__tagged_mispred_branch,
	P4_PEBS_METRIC__mob_load_replay_retired,
	P4_PEBS_METRIC__split_load_retired,
	P4_PEBS_METRIC__split_store_retired,

	P4_PEBS_METRIC__max
};

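/*
 * Illustrative sketch only (not the literal driver code): the PEBS
 * metric lives in the low bits of the config (see the layout notes
 * below), so a config carrying it could be tested roughly like
 *
 *	u64 config = ... | (u64)P4_PEBS_METRIC__dtlb_load_miss_retired |
 *		     P4_PEBS_CONFIG_ENABLE;
 *
 *	if (p4_config_pebs_has(config, P4_PEBS_CONFIG_ENABLE))
 *		metric = p4_config_unpack_metric(config);
 */
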
/*
 * Notes on internal configuration of ESCR+CCCR tuples
 *
 * Since the P4 performance-monitoring registers are organized quite
 * differently from the "architectural" ones, and we have only 64 bits
 * to keep the configuration of a performance event, the following
 * trick is used.
 *
 * 1) Since only the low 32 bits of both the ESCR and CCCR registers
 * are meaningful, we pack them into a single 64 bit configuration.
 * The low 32 bits of such a config correspond to the low 32 bits
 * of the CCCR register and the high 32 bits correspond to the low
 * 32 bits of the ESCR register.
 *
 * 2) The meaning of every bit of such a config field can be found
 * in the Intel SDM, but it should be noted that we "borrow" some
 * reserved bits for our own usage and clear them or set them to a
 * proper value when we do a real write to the hardware registers.
 *
 * 3) The format of the config bits is the following, and every field
 * should be either 0 or set to one of the predefined values:
 *
 * Low 32 bits
 * -----------
 *   0-6: P4_PEBS_METRIC enum
 *  7-11: reserved
 *    12: reserved (Enable)
 * 13-15: reserved (ESCR select)
 * 16-17: Active Thread
 *    18: Compare
 *    19: Complement
 * 20-23: Threshold
 *    24: Edge
 *    25: reserved (FORCE_OVF)
 *    26: reserved (OVF_PMI_T0)
 *    27: reserved (OVF_PMI_T1)
 * 28-29: reserved
 *    30: reserved (Cascade)
 *    31: reserved (OVF)
 *
 * High 32 bits
 * ------------
 *     0: reserved (T1_USR)
 *     1: reserved (T1_OS)
 *     2: reserved (T0_USR)
 *     3: reserved (T0_OS)
 *     4: Tag Enable
 *   5-8: Tag Value
 *  9-24: Event Mask (may use P4_ESCR_EMASK_BIT helper)
 * 25-30: enum P4_EVENTS
 *    31: reserved (HT thread)
 */

#endif /* PERF_EVENT_P4_H */